W0909 14:34:07.211000 140590996850496 torch/fx/experimental/symbolic_shapes.py:4449] [0/0] xindex is not in var_ranges, defaulting to unknown range. V0909 14:34:26.415000 140590996850496 torch/_dynamo/guards.py:2169] [0/0] [__guards] GUARDS: V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] TREE_GUARD_MANAGER: V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] +- RootGuardManager V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | +- DEFAULT_DEVICE: utils_device.CURRENT_DEVICE == None # _dynamo/output_graph.py:460 in init_ambient_guards V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | +- GLOBAL_STATE: ___check_global_state() V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | +- GuardManager: source=L['self'], accessed_by=DictGetItemGuardAccessor(self) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | +- ID_MATCH: ___check_obj_id(L['self'], 140581773415408) # scale_lora_layers(self, lora_scale) # diffusers/src/diffusers/models/transformers/transformer_flux.py:436 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | +- GuardManager: source=L['self'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | +- GuardManager: source=L['self'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- ID_MATCH: ___check_obj_id(L['self'].training, 140591004393440) # scale_lora_layers(self, lora_scale) # diffusers/src/diffusers/models/transformers/transformer_flux.py:436 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | +- GuardManager: source=L['self']._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- GuardManager: source=L['self'].norm_out, accessed_by=DictGetItemGuardAccessor(norm_out) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(L['self'].norm_out, 140581770788240) # hidden_states = self.norm_out(hidden_states, temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:548 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].norm_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].norm_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].norm_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].norm_out.training, 140591004393440) # hidden_states = self.norm_out(hidden_states, temb) # 
diffusers/src/diffusers/models/transformers/transformer_flux.py:548 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].norm_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].norm_out.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].norm_out.norm, 140581765087584) # x = self.norm(x) * (1 + scale)[:, None, :] + shift[:, None, :] # diffusers/src/diffusers/models/normalization.py:306 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].norm_out.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].norm_out.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].norm_out.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale)[:, None, :] + shift[:, None, :] # diffusers/src/diffusers/models/normalization.py:306 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].norm_out.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].norm_out.silu, 140581765087488) # emb = self.linear(self.silu(conditioning_embedding).to(x.dtype)) # diffusers/src/diffusers/models/normalization.py:304 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].norm_out.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].norm_out.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].norm_out.silu.training, 140591004393440) # emb = self.linear(self.silu(conditioning_embedding).to(x.dtype)) # diffusers/src/diffusers/models/normalization.py:304 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].norm_out.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].norm_out.linear, 140581765087536) # emb = self.linear(self.silu(conditioning_embedding).to(x.dtype)) # diffusers/src/diffusers/models/normalization.py:304 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].norm_out.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: 
source=L['self'].norm_out.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].norm_out.linear.training, 140591004393440) # emb = self.linear(self.silu(conditioning_embedding).to(x.dtype)) # diffusers/src/diffusers/models/normalization.py:304 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].norm_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].norm_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].norm_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].norm_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- GuardManager: source=L['self'].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(L['self'].proj_out, 140581765087440) # output = self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:549 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].proj_out.training, 140591004393440) # output = self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:549 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- GuardManager: source=L['self'].pos_embed, accessed_by=DictGetItemGuardAccessor(pos_embed) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(L['self'].pos_embed, 140581773415024) # image_rotary_emb = self.pos_embed(ids) # diffusers/src/diffusers/models/transformers/transformer_flux.py:469 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].pos_embed.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].pos_embed.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].pos_embed.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].pos_embed.training, 140591004393440) # image_rotary_emb = self.pos_embed(ids) # diffusers/src/diffusers/models/transformers/transformer_flux.py:469 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].pos_embed.axes_dim, accessed_by=DictGetItemGuardAccessor(axes_dim) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].pos_embed.axes_dim, 140591004488512) # self.axes_dim[i], pos[:, i], repeat_interleave_real=True, use_real=True, freqs_dtype=freqs_dtype # diffusers/src/diffusers/models/embeddings.py:698 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- LENGTH_CHECK: len(L['self'].pos_embed.axes_dim) == 3 # self.axes_dim[i], pos[:, i], repeat_interleave_real=True, use_real=True, freqs_dtype=freqs_dtype # diffusers/src/diffusers/models/embeddings.py:698 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].pos_embed.axes_dim[0], accessed_by=TupleGetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- EQUALS_MATCH: L['self'].pos_embed.axes_dim[0] == 16 # cos, sin = get_1d_rotary_pos_embed( # diffusers/src/diffusers/models/embeddings.py:697 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].pos_embed.axes_dim[1], accessed_by=TupleGetItemGuardAccessor(1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- EQUALS_MATCH: L['self'].pos_embed.axes_dim[1] == 56 # cos, sin = get_1d_rotary_pos_embed( # diffusers/src/diffusers/models/embeddings.py:697 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].pos_embed.axes_dim[2], accessed_by=TupleGetItemGuardAccessor(2) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- EQUALS_MATCH: L['self'].pos_embed.axes_dim[2] == 56 # cos, sin = get_1d_rotary_pos_embed( # diffusers/src/diffusers/models/embeddings.py:697 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].pos_embed._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].pos_embed._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].pos_embed._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].pos_embed._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- GuardManager: source=L['self'].x_embedder, 
accessed_by=DictGetItemGuardAccessor(x_embedder) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(L['self'].x_embedder, 140581773423280) # hidden_states = self.x_embedder(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:442 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].x_embedder.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].x_embedder.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].x_embedder.training, 140591004393440) # hidden_states = self.x_embedder(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:442 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- GuardManager: source=L['self'].time_text_embed, accessed_by=DictGetItemGuardAccessor(time_text_embed) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed, 140581773422416) # else self.time_text_embed(timestep, guidance, pooled_projections) # diffusers/src/diffusers/models/transformers/transformer_flux.py:452 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].time_text_embed.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].time_text_embed.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].time_text_embed.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.training, 140591004393440) # else self.time_text_embed(timestep, guidance, pooled_projections) # diffusers/src/diffusers/models/transformers/transformer_flux.py:452 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].time_text_embed._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].time_text_embed.time_proj, accessed_by=DictGetItemGuardAccessor(time_proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.time_proj, 140581773415216) # timesteps_proj = self.time_proj(timestep) # diffusers/src/diffusers/models/embeddings.py:1059 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.time_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].time_text_embed.time_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].time_text_embed.time_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.time_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.time_proj.training, 140591004393440) # timesteps_proj = self.time_proj(timestep) # diffusers/src/diffusers/models/embeddings.py:1059 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.time_proj.scale, accessed_by=DictGetItemGuardAccessor(scale) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- EQUALS_MATCH: L['self'].time_text_embed.time_proj.scale == 1 # scale=self.scale, # diffusers/src/diffusers/models/embeddings.py:769 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.time_proj.num_channels, accessed_by=DictGetItemGuardAccessor(num_channels) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- EQUALS_MATCH: L['self'].time_text_embed.time_proj.num_channels == 256 # self.num_channels, # diffusers/src/diffusers/models/embeddings.py:766 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.time_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.time_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.time_proj.flip_sin_to_cos, accessed_by=DictGetItemGuardAccessor(flip_sin_to_cos) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.time_proj.flip_sin_to_cos, 140591004393408) # flip_sin_to_cos=self.flip_sin_to_cos, # diffusers/src/diffusers/models/embeddings.py:767 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.time_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.time_proj._backward_pre_hooks, 
accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.time_proj.downscale_freq_shift, accessed_by=DictGetItemGuardAccessor(downscale_freq_shift) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- EQUALS_MATCH: L['self'].time_text_embed.time_proj.downscale_freq_shift == 0 # downscale_freq_shift=self.downscale_freq_shift, # diffusers/src/diffusers/models/embeddings.py:768 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder, accessed_by=DictGetItemGuardAccessor(text_embedder) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.text_embedder, 140581773415120) # pooled_projections = self.text_embedder(pooled_projection) # diffusers/src/diffusers/models/embeddings.py:1067 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].time_text_embed.text_embedder.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.text_embedder.training, 140591004393440) # pooled_projections = self.text_embedder(pooled_projection) # diffusers/src/diffusers/models/embeddings.py:1067 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder.act_1, accessed_by=DictGetItemGuardAccessor(act_1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.text_embedder.act_1, 140581773423184) # hidden_states = self.act_1(hidden_states) # diffusers/src/diffusers/models/embeddings.py:1511 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder.act_1.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder.act_1.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].time_text_embed.text_embedder.act_1.training, 140591004393440) # hidden_states = self.act_1(hidden_states) # diffusers/src/diffusers/models/embeddings.py:1511 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder.linear_1, accessed_by=DictGetItemGuardAccessor(linear_1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.text_embedder.linear_1, 140581773422944) # hidden_states = self.linear_1(caption) # diffusers/src/diffusers/models/embeddings.py:1510 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder.linear_1.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder.linear_1.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.text_embedder.linear_1.training, 140591004393440) # hidden_states = self.linear_1(caption) # diffusers/src/diffusers/models/embeddings.py:1510 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder.linear_2, accessed_by=DictGetItemGuardAccessor(linear_2) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.text_embedder.linear_2, 140581773423232) # hidden_states = self.linear_2(hidden_states) # diffusers/src/diffusers/models/embeddings.py:1512 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder.linear_2.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder.linear_2.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.text_embedder.linear_2.training, 140591004393440) # hidden_states = self.linear_2(hidden_states) # diffusers/src/diffusers/models/embeddings.py:1512 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder._forward_pre_hooks, 
accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder, accessed_by=DictGetItemGuardAccessor(guidance_embedder) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.guidance_embedder, 140581773422512) # guidance_emb = self.guidance_embedder(guidance_proj.to(dtype=pooled_projection.dtype)) # (N, D) # diffusers/src/diffusers/models/embeddings.py:1063 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].time_text_embed.guidance_embedder.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.guidance_embedder.training, 140591004393440) # guidance_emb = self.guidance_embedder(guidance_proj.to(dtype=pooled_projection.dtype)) # (N, D) # diffusers/src/diffusers/models/embeddings.py:1063 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.act, accessed_by=DictGetItemGuardAccessor(act) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.guidance_embedder.act, 140585079194528) # if self.act is not None: # diffusers/src/diffusers/models/embeddings.py:745 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.act.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.act.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.guidance_embedder.act.training, 140591004393440) # if self.act is not None: # diffusers/src/diffusers/models/embeddings.py:745 in forward V0909 
14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.linear_1, accessed_by=DictGetItemGuardAccessor(linear_1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.guidance_embedder.linear_1, 140581773422224) # sample = self.linear_1(sample) # diffusers/src/diffusers/models/embeddings.py:743 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.linear_1.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.linear_1.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.guidance_embedder.linear_1.training, 140591004393440) # sample = self.linear_1(sample) # diffusers/src/diffusers/models/embeddings.py:743 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.linear_2, accessed_by=DictGetItemGuardAccessor(linear_2) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.guidance_embedder.linear_2, 140581773421840) # sample = self.linear_2(sample) # diffusers/src/diffusers/models/embeddings.py:748 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.linear_2.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.linear_2.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.guidance_embedder.linear_2.training, 140591004393440) # sample = self.linear_2(sample) # diffusers/src/diffusers/models/embeddings.py:748 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.post_act, accessed_by=DictGetItemGuardAccessor(post_act) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.guidance_embedder.post_act, 140591004478624) # if self.post_act is not None: # diffusers/src/diffusers/models/embeddings.py:750 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: 
source=L['self'].time_text_embed.guidance_embedder._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.guidance_embedder.forward.__defaults__[0], 140591004478624) # if condition is not None: # diffusers/src/diffusers/models/embeddings.py:741 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder, accessed_by=DictGetItemGuardAccessor(timestep_embedder) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.timestep_embedder, 140581773415072) # timesteps_emb = self.timestep_embedder(timesteps_proj.to(dtype=pooled_projection.dtype)) # (N, D) # diffusers/src/diffusers/models/embeddings.py:1060 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].time_text_embed.timestep_embedder.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.timestep_embedder.training, 140591004393440) # timesteps_emb = self.timestep_embedder(timesteps_proj.to(dtype=pooled_projection.dtype)) # (N, D) # diffusers/src/diffusers/models/embeddings.py:1060 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder._modules, 
accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.act, accessed_by=DictGetItemGuardAccessor(act) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.timestep_embedder.act, 140585079194528) # if self.act is not None: # diffusers/src/diffusers/models/embeddings.py:745 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.act.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.act.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.timestep_embedder.act.training, 140591004393440) # if self.act is not None: # diffusers/src/diffusers/models/embeddings.py:745 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.linear_1, accessed_by=DictGetItemGuardAccessor(linear_1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.timestep_embedder.linear_1, 140581773422992) # sample = self.linear_1(sample) # diffusers/src/diffusers/models/embeddings.py:743 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.linear_1.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.linear_1.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.timestep_embedder.linear_1.training, 140591004393440) # sample = self.linear_1(sample) # diffusers/src/diffusers/models/embeddings.py:743 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.linear_2, accessed_by=DictGetItemGuardAccessor(linear_2) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.timestep_embedder.linear_2, 140581773422848) # sample = self.linear_2(sample) # diffusers/src/diffusers/models/embeddings.py:748 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.linear_2.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: 
source=L['self'].time_text_embed.timestep_embedder.linear_2.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.timestep_embedder.linear_2.training, 140591004393440) # sample = self.linear_2(sample) # diffusers/src/diffusers/models/embeddings.py:748 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.post_act, accessed_by=DictGetItemGuardAccessor(post_act) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.timestep_embedder.post_act, 140591004478624) # if self.post_act is not None: # diffusers/src/diffusers/models/embeddings.py:750 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.timestep_embedder.forward.__defaults__[0], 140591004478624) # if condition is not None: # diffusers/src/diffusers/models/embeddings.py:741 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].time_text_embed._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].time_text_embed._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].time_text_embed._forward_pre_hooks, 
accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].time_text_embed._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- GuardManager: source=L['self'].context_embedder, accessed_by=DictGetItemGuardAccessor(context_embedder) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(L['self'].context_embedder, 140581773423136) # encoder_hidden_states = self.context_embedder(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:454 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].context_embedder.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].context_embedder.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].context_embedder.training, 140591004393440) # encoder_hidden_states = self.context_embedder(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:454 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- GuardManager: source=L['self'].transformer_blocks, accessed_by=DictGetItemGuardAccessor(transformer_blocks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks, 140581773423376) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].transformer_blocks.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].transformer_blocks.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks.training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].transformer_blocks[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0], 140581773423328) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff, accessed_by=DictGetItemGuardAccessor(ff) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff, 140581773424768) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net, 140581773425008) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].ff.net, 93831537618768) # 
for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[0].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0], 140581773424960) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj, 140581773425056) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj.training, 140591004393440) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[1], 140581773425104) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[2], 140581773425152) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[2].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
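The ff subtree above exercises most of the guard vocabulary that recurs through the rest of this dump. As a rough mental model only (a sketch, not Dynamo internals: the real checks run through the compiled accessors printed above, `ids` is a hypothetical snapshot of object ids taken at compile time, and 140591004393440 is presumably id(False) in this process, since every .training flag is guarded against it):

import torch

def ff_guards_still_valid(model: torch.nn.Module, ids: dict) -> bool:
    net = model.transformer_blocks[0].ff.net
    return (
        id(net) == ids["net"]                  # ID_MATCH: the very same ModuleList object
        and len(net) == 3                      # LENGTH_CHECK on the ModuleList
        and id(net[0]) == ids["net[0]"]        # ID_MATCH per submodule
        and net[0].training is False           # ID_MATCH against the False singleton
        and net[0].approximate == 'tanh'       # EQUALS_MATCH: plain attribute read in gelu()
        and 'forward' not in net[0].__dict__   # DICT_CONTAINS: no per-instance forward
                                               # override (nn/modules/module.py:1556)
    )

If any of these ever returned False, the cached graph would be rejected and the next call would recompile.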
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn, 140581773423952) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_k, 140581773424144) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_q, 140581773424240) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_v, 140581773424336) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.norm_k, 140581773424192) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.norm_k.weight, 140581772779872) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.norm_q, 140581773424096) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.norm_q.weight, 140581906594960) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out, 140581773424528) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[0], 140581773424576) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[0].training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[1], 140581773424624) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_k_proj, 140581773424384) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_k_proj.training, 140591004393440) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_q_proj, 140581773424480) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_q_proj.training, 140591004393440) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_v_proj, 140581773424432) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_v_proj.training, 140591004393440) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_add_out, 140581773424672) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_add_out.training, 140591004393440) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.norm_added_k, 140581773424816) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.norm_added_k.weight, 140581766060672) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.norm_added_q, 140581773424720) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.norm_added_q.weight, 140581765982592) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.processor, 140581773423904) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
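Note the EQUALS_MATCH on attn.heads == 24 and the TYPE_MATCH/ID_MATCH pair on attn.processor: any plain Python attribute read during tracing is burned into the graph as a constant and re-checked by a guard, so swapping the processor object (or editing heads) after compilation forces a recompile. A minimal, self-contained illustration of the same specialization, with hypothetical names (Toy, toy, m) rather than anything from this log:

import torch

class Toy(torch.nn.Module):
    def __init__(self):
        super().__init__()
        self.heads = 24
        self.lin = torch.nn.Linear(8, 48)

    def forward(self, x):
        # Reading self.heads in Python bakes 24 into the traced graph and
        # emits an EQUALS_MATCH guard analogous to the one on attn.heads.
        y = self.lin(x)
        return y.view(*y.shape[:-1], self.heads, -1).flatten(-2)

toy = Toy()
m = torch.compile(toy)
m(torch.randn(2, 3, 8))  # first call: trace + compile, guards installed
toy.heads = 8            # invalidates the EQUALS_MATCH guard (heads == 24)
m(torch.randn(2, 3, 8))  # guard miss -> silent recompilation with heads == 8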
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1, accessed_by=DictGetItemGuardAccessor(norm1)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1, 140581773423472) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.norm, 140581773423664) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.silu, 140581773423568) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.linear, 140581773423616) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm2, accessed_by=DictGetItemGuardAccessor(norm2)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm2, 140581773424864) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm2.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
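A plausible reading of the two object ids that recur throughout this dump (an inference from the patterns, not something the log states): 140591004478624 appears exactly where the guard annotates "if self.emb is not None", so it is presumably id(None) in this process, and 140591004393440 appears on every .training flag, presumably id(False) for a model in eval mode. ___check_obj_id itself amounts to a CPython identity check against an id() captured at compile time, which is why these constants change from run to run:

# Pure-Python equivalent of the ___check_obj_id guards above (illustrative only).
def check_obj_id(obj: object, expected_id: int) -> bool:
    return id(obj) == expected_id

check_obj_id(None, id(None))    # the norm1.emb guard: passes while emb stays None
check_obj_id(False, id(False))  # the .training guards: pass until model.train()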
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context, 140581773425200) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net, 140581773425344) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[0].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0], 140581773425296) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].proj, 140581773425392) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].proj.training, 140591004393440) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[1], 140581773425488) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[2], 140581773425536) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[2].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
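Every module in the tree also gets GuardManagers over its _forward_hooks, _backward_hooks, _forward_pre_hooks and _backward_pre_hooks dicts, which were empty at compile time. The apparent intent (an assumption consistent with these guards, not stated in the log) is that registering a hook afterwards fails the guard check and re-specializes with the hook traced in, rather than silently skipping it; `model` below is a placeholder for the compiled transformer:

# Hypothetical post-compilation mutation of a guarded hook dict.
handle = model.transformer_blocks[0].ff_context.register_forward_hook(
    lambda module, args, output: output  # no-op hook, purely illustrative
)
# ...the next compiled call re-evaluates guards and recompiles with the hook...
handle.remove()

To watch only the guard failures that actually cause recompiles, TORCH_LOGS="recompiles" is the narrower companion to the TORCH_LOGS="guards" output shown here.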
scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.silu, 140581773423760) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.linear, 140581773423808) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # 
diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm2_context, 140581773424912) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].transformer_blocks[1], accessed_by=GetItemGuardAccessor(1) 
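Every guard in the norm1_context subtree above points at the same three lines of diffusers' AdaLayerNormZero-style forward (normalization.py:135-139). A minimal sketch of that pattern for orientation; dim=64 is a made-up width for illustration, not Flux's real size:

import torch
import torch.nn as nn

class AdaLNZeroSketch(nn.Module):
    def __init__(self, dim=64):
        super().__init__()
        self.silu = nn.SiLU()
        self.linear = nn.Linear(dim, 6 * dim)  # emb = self.linear(self.silu(emb))
        self.norm = nn.LayerNorm(dim, elementwise_affine=False)

    def forward(self, x, emb):
        emb = self.linear(self.silu(emb))
        shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = emb.chunk(6, dim=1)
        # the modulation line the ID_MATCH guards keep citing:
        x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]
        return x, gate_msa, shift_mlp, scale_mlp, gate_mlp

x = torch.randn(2, 16, 64)   # (batch, tokens, dim)
emb = torch.randn(2, 64)     # conditioning embedding
out, *mods = AdaLNZeroSketch()(x, emb)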
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1], 140581773423424) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff, accessed_by=DictGetItemGuardAccessor(ff)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff, 140581767528656) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net, 140581767528896) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[1].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0], 140581767528848) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
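The TYPE_MATCH and LENGTH_CHECK on ff.net come from iterating the nn.ModuleList inside forward: Dynamo pins the container's exact type and length, so mutating the list falsifies the guards and forces a recompile. A small self-contained sketch of that behavior (the Stack module here is hypothetical, not from diffusers):

import torch
import torch.nn as nn

class Stack(nn.Module):
    def __init__(self):
        super().__init__()
        self.net = nn.ModuleList([nn.Linear(8, 8) for _ in range(3)])

    def forward(self, x):
        for module in self.net:  # same pattern as attention.py:1200
            x = module(x)
        return x

m = Stack()
compiled = torch.compile(m)
compiled(torch.randn(2, 8))    # first call: trace, install LENGTH_CHECK == 3
m.net.append(nn.Linear(8, 8))  # len(self.net) guard now fails
compiled(torch.randn(2, 8))    # guard miss -> Dynamo recompiles this frame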
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].proj, 140581767528944) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].proj.training, 140591004393440) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[1], 140581767528992) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[2], 140581767529040) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[2].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn, 140581773426064) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_k, 140581773426208) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_q, 140581773426304) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_v, 140581773426400) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.norm_k, 140581773426256) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.norm_k.weight, 140581785356144) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.norm_q, 140581773426160) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.norm_q.weight, 140581765888128) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out, 140581773426592) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[0], 140581773426640) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[0].training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[1], 140581767528512) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_k_proj, 140581773426448) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
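The EQUALS_MATCH guards on norm_k.eps and norm_q.eps pin the Python float 1e-06 that normalization.py:428 reads at trace time; the constant is baked into the compiled graph and only re-checked on entry. A sketch of that RMSNorm-style forward, assuming dim=128 for illustration:

import torch
import torch.nn as nn

class RMSNormSketch(nn.Module):
    def __init__(self, dim=128, eps=1e-6):
        super().__init__()
        self.eps = eps                             # guarded by EQUALS_MATCH
        self.weight = nn.Parameter(torch.ones(dim))

    def forward(self, hidden_states):
        variance = hidden_states.pow(2).mean(-1, keepdim=True)
        # the guarded line: hidden_states * torch.rsqrt(variance + self.eps)
        hidden_states = hidden_states * torch.rsqrt(variance + self.eps)
        if self.weight is not None:                # guarded by ID_MATCH on weight
            hidden_states = hidden_states * self.weight
        return hidden_states

RMSNormSketch()(torch.randn(2, 24, 128))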
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_k_proj.training, 140591004393440) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_q_proj, 140581773426544) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_q_proj.training, 140591004393440) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_v_proj, 140581773426496) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_v_proj.training, 140591004393440) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_add_out, 140581767528560) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_add_out.training, 140591004393440) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.norm_added_k, 140581767528704) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.norm_added_k.weight, 140581785356064) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.norm_added_q, 140581767528608) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.norm_added_q.weight, 140581774377824) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.processor, 140581773426016) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1, accessed_by=DictGetItemGuardAccessor(norm1)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1, 140581773425584) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.norm, 140581773425728) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.silu, 140581773425632) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.linear, 140581773425680) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
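The EQUALS_MATCH on attn.heads == 24 specializes the head split at attention_processor.py:1721. Only heads is pinned by this log; the inner_dim of 3072 below is an assumption for illustration, chosen so the arithmetic is concrete:

import torch

batch, seq, inner_dim, heads = 2, 16, 3072, 24  # inner_dim assumed, heads guarded
head_dim = inner_dim // heads                   # 3072 // 24 == 128
query = torch.randn(batch, seq, inner_dim)
# the (batch, heads, seq, head_dim) layout the processor reshapes into
query = query.view(batch, seq, heads, head_dim).transpose(1, 2)
print(query.shape)                              # torch.Size([2, 24, 16, 128])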
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm2, accessed_by=DictGetItemGuardAccessor(norm2) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm2, 140581767528752) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm2.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context, 140581767529088) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net, 140581767529232) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[1].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0], 140581767529184) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].proj, 140581767529280) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].proj.training, 140591004393440) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:34:26.702000 
140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[1], 140581767529376) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[2], 140581767529424) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[2].training, 140591004393440) # for module 
in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context, 140581773425776) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context._modules, 
accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.norm, 140581773425968) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.silu, 140581773425872) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.linear, 140581773425920) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 
14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm2_context, 140581767528800) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[1]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].transformer_blocks[2], accessed_by=GetItemGuardAccessor(2) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2], 140581773425248) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff, accessed_by=DictGetItemGuardAccessor(ff) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff, 140581767530720) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[2].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net, 140581767530960) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[2].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0], 140581767530912) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 
140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].proj, 140581767531008) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].proj.training, 140591004393440) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[2].ff.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[1], 140581767531056) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[2], 140581767531104) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[2].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn, 
accessed_by=DictGetItemGuardAccessor(attn) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn, 140581767529952) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_k, 140581767530096) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_q, 140581767530192) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_v, 140581767530288) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.norm_k, 140581767530144) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[2].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.norm_k.weight, 140581772708016) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.norm_q, 140581767530048) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].attn.norm_q.__dict__) # forward_call = (self._slow_forward if 
torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.norm_q.weight, 140581773245904) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out, 140581767530480) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:34:26.702000 
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[0], 140581767530528) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[0].training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[1], 140581767530576) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_k_proj, 140581767530336) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_k_proj.training, 140591004393440) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_q_proj, 140581767530432) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_q_proj.training, 140591004393440) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_v_proj, 140581767530384) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_v_proj.training, 140591004393440) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_add_out, 140581767530624) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_add_out.training, 140591004393440) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.norm_added_k, 140581767530768) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.norm_added_k.weight, 140581766000416) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.norm_added_q, 140581767530672) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.norm_added_q.weight, 140581766000496) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
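The guard kinds in this tree reduce to cheap Python-level predicates evaluated before a compiled graph is reused. A minimal, illustrative re-implementation (the helper names below are hypothetical; the real checks live in torch/_dynamo/guards.py and the C++ guard manager):

    # Illustrative sketch of the predicates named in the dump above.
    def check_obj_id(obj, expected_id):
        # ID_MATCH: the attribute must be the exact same Python object.
        # `module.training` is a bool, and True/False are singletons, so
        # guarding train/eval mode by id() is both cheap and exact.
        return id(obj) == expected_id

    def equals_match(value, expected):
        # EQUALS_MATCH: plain value equality, used for scalars such as
        # norm_added_k.eps == 1e-06, where identity would be too strict.
        return value == expected

    def dict_contains(d, key):
        # DICT_CONTAINS (negated above): verifies no per-instance `forward`
        # was monkey-patched onto the module after compilation.
        return key in d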
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.processor, 140581767529904) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1, accessed_by=DictGetItemGuardAccessor(norm1)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1, 140581767529472) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.norm, 140581767529616) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.silu, 140581767529520) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.linear, 140581767529568) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm2, accessed_by=DictGetItemGuardAccessor(norm2)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm2, 140581767530816) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm2.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
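For reference, a guard dump like the one above can be reproduced by enabling the guards logging artifact before compiling; a minimal sketch (nn.Linear stands in here for the Flux transformer that produced this log):

    import torch
    import torch.nn as nn

    # Re-enable the [__guards] artifact seen in this dump
    # (equivalently: run with TORCH_LOGS="guards" in the environment).
    torch._logging.set_logs(guards=True)

    # Any compiled module prints its TREE_GUARD_MANAGER once traced.
    compiled = torch.compile(nn.Linear(8, 8))
    compiled(torch.randn(2, 8))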
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context, 140581767531152) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net, 140581767531296) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[2].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0], 140581767531248) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].proj, 140581767531344) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].proj.training, 140591004393440) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[1], 140581767531440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[2], 140581767531488) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[2].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context, 140581767529664) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.norm, 140581767529856) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.silu, 140581767529760) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.linear, 140581767529808) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm2_context, 140581767530864) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
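The TYPE_MATCH, LENGTH_CHECK and per-index ID_MATCH guards on ff/ff_context.net above all come from iterating an nn.ModuleList inside the traced forward (`for module in self.net:` in diffusers' FeedForward). A minimal sketch that reproduces the same guard shape under that assumption:

    import torch
    import torch.nn as nn

    class TinyFF(nn.Module):
        def __init__(self):
            super().__init__()
            # Three entries, mirroring the LENGTH_CHECK ... == 3 above.
            self.net = nn.ModuleList([nn.Linear(8, 32), nn.GELU(), nn.Linear(32, 8)])

        def forward(self, x):
            # Dynamo specializes on the list: TYPE_MATCH + LENGTH_CHECK on
            # `net`, plus an ID_MATCH for net[0], net[1] and net[2].
            for module in self.net:
                x = module(x)
            return x

    compiled = torch.compile(TinyFF())
    out = compiled(torch.randn(2, 8))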
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | +- GuardManager: source=L['self'].transformer_blocks[3], accessed_by=GetItemGuardAccessor(3)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3], 140581767529136) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff, accessed_by=DictGetItemGuardAccessor(ff)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff, 140581767532784) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net, 140581767533024) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[3].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0], 140581767532976) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].proj, 140581767533072) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].proj.training, 140591004393440) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[1], 140581767533120) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[2], 140581767533168) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[2].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
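Because every submodule visited by the trace is pinned by an ID_MATCH, replacing a module object after compilation invalidates these guards and forces a retrace on the next call; a minimal sketch of that behavior:

    import torch
    import torch.nn as nn

    model = nn.Sequential(nn.Linear(8, 8), nn.ReLU())
    compiled = torch.compile(model)
    compiled(torch.randn(2, 8))   # first call: trace and install guards

    model[0] = nn.Linear(8, 8)    # new object, so the ID_MATCH guard now fails
    compiled(torch.randn(2, 8))   # guard miss here triggers a recompile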
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn, 140581767532016) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_k, 140581767532160) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_q, 140581767532256) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_v, 140581767532352) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.norm_k, 140581767532208) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.norm_k.weight, 140581766002016) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.norm_q, 140581767532112) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.norm_q.weight, 140581766002096) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[3].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_out, 140581767532544) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_out[0], 140581767532592) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_out[0].training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[1], accessed_by=GetItemGuardAccessor(1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_out[1], 140581767532640) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[1].training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_k_proj, 140581767532400) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_k_proj.training, 140591004393440) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_q_proj, 140581767532496) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_q_proj.training, 140591004393440) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | 
| | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_v_proj, 140581767532448) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_v_proj.training, 140591004393440) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_add_out, 140581767532688) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_add_out.training, 140591004393440) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.norm_added_k, 140581767532832) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() 
else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.norm_added_k.weight, 140581766001856) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.norm_added_q, 140581767532736) # if attn.norm_added_q is not None: # 
diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.norm_added_q.weight, 140581766001936) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[3].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.processor, 140581767531968) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1, accessed_by=DictGetItemGuardAccessor(norm1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1, 140581767531536) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else 
self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.norm, 140581767531680) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.silu, 140581767531584) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] 
[0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.linear, 140581767531632) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm2, accessed_by=DictGetItemGuardAccessor(norm2) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm2, 140581767532880) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm2.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context, 140581767533216) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net, 140581767533360) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[3].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 
140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0], 140581767533312) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].proj, 140581767533408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | 
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].proj.training, 140591004393440) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[1], 140581767533504) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[3].ff_context.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[2], 140581767533552) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[2].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context, 140581767531728) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.training, 
accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.norm, 140581767531920) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.silu, 140581767531824) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.linear, 140581767531872) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm2_context, 140581767532928) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
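
Note: the ID_MATCH entries above reduce to CPython identity checks: ___check_obj_id(obj, N) passes only while id(obj) == N, that is, while the attribute is still the exact object Dynamo saw at compile time. A minimal sketch of that semantics follows (the real guards are compiled C++ accessors; reading the repeated id 140591004393440 as the False singleton is an inference from every `.training` flag guarding the same id after model.eval()):

    import torch

    def id_match(obj, expected_id: int) -> bool:
        # ID_MATCH: the guarded attribute must still be the exact same
        # Python object (by id) recorded when the graph was compiled.
        return id(obj) == expected_id

    lin = torch.nn.Linear(4, 4).eval()
    # Every `.training` guard in this dump records one id, consistent
    # with all submodules holding the singleton False after .eval().
    print(id_match(lin.training, id(False)))  # True

A dump in this format can be regenerated by enabling the guards logging artifact, e.g. TORCH_LOGS="guards" on the command line or torch._logging.set_logs(guards=True) in code, before running the torch.compile'd forward.
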
| | | | | +- GuardManager: source=L['self'].transformer_blocks[4], accessed_by=GetItemGuardAccessor(4)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4], 140581767531200) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff, accessed_by=DictGetItemGuardAccessor(ff)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff, 140581767534848) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net, 140581767535088) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[4].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0], 140581767535040) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].proj, 140581767535136) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].proj.training, 140591004393440) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[1], 140581767535184) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[2], 140581767535232) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[2].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
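
Note: the LENGTH_CHECK and EQUALS_MATCH guards above pin the feed-forward layout: ff.net must remain a 3-entry ModuleList, and net[0].approximate must remain the string 'tanh' forwarded to F.gelu. A condensed sketch of the module shape those guards describe (this mirrors, but is not verbatim, diffusers' FeedForward/GELU at attention.py:1200 and activations.py:83-88; the 3072/12288 dims are illustrative choices matching a 24-head, 128-per-head block):

    import torch.nn as nn
    import torch.nn.functional as F

    class GELUProj(nn.Module):
        # net[0]: projection + tanh-approximated GELU. `approximate` is a
        # plain str attribute, hence the EQUALS_MATCH guard on 'tanh'.
        def __init__(self, dim_in: int, dim_out: int, approximate: str = "tanh"):
            super().__init__()
            self.proj = nn.Linear(dim_in, dim_out)
            self.approximate = approximate

        def forward(self, x):
            return F.gelu(self.proj(x), approximate=self.approximate)

    # LENGTH_CHECK len(net) == 3: activation, dropout, output projection.
    net = nn.ModuleList([GELUProj(3072, 12288), nn.Dropout(0.0), nn.Linear(12288, 3072)])
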
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn, 140581767534080) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_k, 140581767534224) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_q, 140581767534320) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_v, 140581767534416) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.norm_k, 140581767534272) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.norm_k.weight, 140581766004096) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.norm_q, 140581767534176) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.norm_q.weight, 140581783273312) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
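
Note: norm_q and norm_k (and the norm_added_* modules below) are RMSNorms; the EQUALS_MATCH on eps == 1e-06 and the ID_MATCH on .weight correspond to the two attribute reads in the quoted normalization.py lines. A condensed sketch of that forward (not the verbatim diffusers RMSNorm, which also handles dtype casting):

    import torch

    def rms_norm(x: torch.Tensor, weight, eps: float = 1e-6) -> torch.Tensor:
        variance = x.float().pow(2).mean(-1, keepdim=True)
        x = x * torch.rsqrt(variance + eps)   # normalization.py:428
        if weight is not None:                # normalization.py:430, ID_MATCH on .weight
            x = x * weight
        return x
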
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out, 140581767534608) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[0], 140581767534656) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[0].training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[1], 140581767534704) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_k_proj, 140581767534464) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_k_proj.training, 140591004393440) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_q_proj, 140581767534560) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_q_proj.training, 140591004393440) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_v_proj, 140581767534512) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_v_proj.training, 140591004393440) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_add_out, 140581767534752) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_add_out.training, 140591004393440) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
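
Note: the guarded projections above are the two halves of Flux's joint attention: to_q/to_k/to_v act on the image stream, add_q_proj/add_k_proj/add_v_proj on the text (encoder) stream, and to_out[0]/to_add_out map each stream back out. A rough sketch of the call order implied by the quoted attention_processor.py lines (joint_projections is a hypothetical helper; the real processor also reshapes per head, applies the RMSNorms and rotary embeddings, and runs scaled-dot-product attention over the concatenated streams):

    def joint_projections(attn, hidden_states, encoder_hidden_states):
        query = attn.to_q(hidden_states)                 # :1716
        key = attn.to_k(hidden_states)                   # :1717
        value = attn.to_v(hidden_states)                 # :1718
        head_dim = query.shape[-1] // attn.heads         # :1721, heads == 24 is guarded below
        enc_q = attn.add_q_proj(encoder_hidden_states)   # :1735
        enc_k = attn.add_k_proj(encoder_hidden_states)   # :1736
        enc_v = attn.add_v_proj(encoder_hidden_states)   # :1737
        return (query, key, value), (enc_q, enc_k, enc_v), head_dim
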
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.norm_added_k, 140581767534896) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.norm_added_k.weight, 140581766003936) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.norm_added_q, 140581767534800) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.norm_added_q.weight, 140581766004016) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.processor, 140581767534032) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
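
Note: the processor gets both a TYPE_MATCH and an ID_MATCH because the quoted code first inspects self.processor.__call__'s signature (a per-class property) and then calls the instance itself, while the DICT_CONTAINS guards throughout assert that no per-instance forward has been monkey-patched onto a module. Minimal sketches of these two predicates (the real checks are compiled built-ins; this is just their Python meaning):

    def type_match(obj, expected_type_id: int) -> bool:
        # TYPE_MATCH: the object's class must be the class seen at trace time.
        return id(type(obj)) == expected_type_id

    def no_instance_forward(module) -> bool:
        # DICT_CONTAINS (negated): module.__dict__ must not shadow the
        # class-level forward with an instance attribute.
        return "forward" not in module.__dict__
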
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1, accessed_by=DictGetItemGuardAccessor(norm1)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1, 140581767533600) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.norm, 140581767533744) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.silu, 140581767533648) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.linear, 140581767533696) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
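
Note: norm1 is the AdaLayerNormZero whose internals (silu, linear, norm) are guarded above; the quoted normalization.py:137-139 lines amount to the following condensed modulation (a sketch, not the verbatim class; the chunk order follows diffusers' AdaLayerNormZero):

    def ada_layer_norm_zero(x, emb, linear, silu, norm):
        # emb is the conditioning embedding (temb); norm is LayerNorm
        # without learned affine, so all scale/shift comes from `linear`.
        emb = linear(silu(emb))                                          # :137
        shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = emb.chunk(6, dim=1)
        x = norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]     # :139
        return x, gate_msa, shift_mlp, scale_mlp, gate_mlp
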
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm2, accessed_by=DictGetItemGuardAccessor(norm2)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm2, 140581767534944) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm2.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context, 140581767535280) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net, 140581767535424) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[4].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0], 140581767535376) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | 
| +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].proj, 140581767535472) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].proj.training, 140591004393440) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[1], 140581767535568) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[4].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[2], 140581767535616) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[2].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context, 140581767533792) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 
14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.norm, 140581767533984) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.silu, 140581767533888) # emb = 
self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.linear, 140581767533936) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | 
| | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm2_context, 140581767534992) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].transformer_blocks[5], accessed_by=GetItemGuardAccessor(5) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5], 140581767533264) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 
in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff, accessed_by=DictGetItemGuardAccessor(ff) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff, 140581767536912) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net, 140581767537152) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[5].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 
14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0], 140581767537104) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].proj, 140581767537200) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].proj.training, 140591004393440) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[1], 140581767537248) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[2], 140581767537296) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[2].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn, 140581767536144) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_k, 140581767536288) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_q, 140581767536384) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_v, 140581767536480) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_v.training, 140591004393440) # value = 
attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.norm_k, 140581767536336) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.norm_k.weight, 140581765824816) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_k._backward_hooks, 
accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.norm_q, 140581767536240) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.norm_q.weight, 140581772716176) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 
14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out, 140581767536672) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[0], 140581767536720) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[0].training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[1], accessed_by=GetItemGuardAccessor(1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[1], 140581767536768) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_k_proj, 140581767536528) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_k_proj.training, 140591004393440) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_q_proj, 140581767536624) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in 
__call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_q_proj.training, 140591004393440) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_v_proj, 140581767536576) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_v_proj.training, 140591004393440) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_add_out, 140581767536816) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_add_out.training, 140591004393440) # encoder_hidden_states = 
attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.norm_added_k, 140581767536960) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.norm_added_k.weight, 140581772712736) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.norm_added_q, 140581767536864) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[5].attn.norm_added_q.weight, 140581772711536) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.processor, 140581767536096) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn._backward_pre_hooks, 
accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1, accessed_by=DictGetItemGuardAccessor(norm1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1, 140581767535664) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.norm, 140581767535808) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 
140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.silu, 140581767535712) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.linear, 140581767535760) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[5].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm2, accessed_by=DictGetItemGuardAccessor(norm2) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm2, 140581767537008) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm2.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context, 140581767537344) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context._modules, 
accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net, 140581767537488) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[5].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0], 140581767537440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward 
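
The guard kinds that repeat throughout this tree (ID_MATCH, TYPE_MATCH, LENGTH_CHECK, EQUALS_MATCH) all reduce to cheap Python-level comparisons against values recorded at compile time. A minimal sketch of what each one checks, using illustrative stand-in functions rather than Dynamo's real ___check_* builtins or its C++ guard manager:

    # Illustrative stand-ins for the guard kinds in this log, not torch internals.

    def id_match(obj, expected_id: int) -> bool:
        # ID_MATCH / ___check_obj_id: the value must be the exact same
        # Python object (same id()) that was seen when the graph was traced.
        return id(obj) == expected_id

    def type_match(obj, expected_type_id: int) -> bool:
        # TYPE_MATCH / ___check_type_id: the same identity check, on the type.
        return id(type(obj)) == expected_type_id

    def length_check(container, expected_len: int) -> bool:
        # LENGTH_CHECK: e.g. len(ff_context.net) == 3 above.
        return len(container) == expected_len

    def equals_match(value, expected) -> bool:
        # EQUALS_MATCH: value equality, e.g. attn.heads == 24 or eps == 1e-06.
        return value == expected

Identity checks are why the dump records raw integers like 140581767537440: they are the id() of the module captured during tracing.
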
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].proj, 140581767537536) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].proj.training, 140591004393440) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] 
[__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[1], 140581767537632) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[2], 140581767537680) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[2].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context) V0909 14:34:26.702000 
140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context, 140581767535856) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.norm, 140581767536048) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.norm.training, 
140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.silu, 140581767535952) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.linear, 140581767536000) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context._forward_pre_hooks, 
accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm2_context, 140581767537056) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].transformer_blocks[6], accessed_by=GetItemGuardAccessor(6) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6], 140581767535328) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].__dict__) # forward_call = 
(self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff, accessed_by=DictGetItemGuardAccessor(ff) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff, 140581767538976) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net, 140581767539216) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] 
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[6].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0], 140581767539168) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].proj, 140581767539264) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].proj.training, 140591004393440) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[1], 140581767539312) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[6].ff.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[2], 140581767539360) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[2].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn, 140581767538208) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[6].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_k, 140581767538352) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_q, 140581767538448) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_v, 140581767538544) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ 
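
These listings are PyTorch's "guards" logging artifact. To reproduce a dump like this for your own compile, something like the following should work (a sketch; the Linear module and input are placeholders for the Flux transformer and its real inputs):

    import torch

    # Enable the same [__guards] artifact that produced this dump.
    # Equivalent to running with TORCH_LOGS="guards" in the environment.
    torch._logging.set_logs(guards=True)

    model = torch.nn.Linear(8, 8)      # placeholder module
    compiled = torch.compile(model)
    compiled(torch.randn(2, 8))        # guard tree prints after the first compile
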
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.norm_k, 140581767538400) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.norm_k.weight, 140581772709296) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.norm_q, 140581767538304) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.norm_q.weight, 140581783065344) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out, 140581767538736) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[0], 140581767538784) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[0].training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[1], 140581767538832) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_k_proj, 140581767538592) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_k_proj.training, 140591004393440) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_q_proj, 140581767538688) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_q_proj.training, 140591004393440) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_v_proj, 140581767538640) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_v_proj.training, 140591004393440) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_add_out, 140581767538880) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_add_out.training, 140591004393440) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.norm_added_k, 140581767539024) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.norm_added_k.weight, 140581772709936) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.norm_added_q, 140581767538928) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.norm_added_q.weight, 140581772709216) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.processor, 140581767538160) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1, accessed_by=DictGetItemGuardAccessor(norm1)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1, 140581767537728) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.norm, 140581767537872) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.silu, 140581767537776) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.linear, 140581767537824) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm2, accessed_by=DictGetItemGuardAccessor(norm2)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm2, 140581767539072) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm2.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context, 140581767539408) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net, 140581767539552) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[6].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[0], 140581767539504) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[0].proj, 140581767539600) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[0].proj.training, 140591004393440) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[1], 140581767539696) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[2], 140581767539744) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[2].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context, 140581767537920) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.norm, 140581767538112) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.silu, 140581767538016) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.linear, 140581767538064) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm2_context, 140581767539120) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
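
The subtree above for transformer_blocks[6] is repeated, guard for guard, for every entry in the ModuleList; only the index and the object ids change, which is why this dump runs so long. Note also that plain Python attributes read during tracing (attn.heads == 24, norm_k.eps == 1e-06, net[0].approximate == 'tanh') are pinned with EQUALS_MATCH guards: the scalar is baked into the compiled graph, so mutating it later fails the guard and forces a recompile. Below is a minimal sketch of that effect; the module and attribute names are invented for illustration, not taken from this log:

    import torch
    import torch.nn as nn

    class RMSNormLike(nn.Module):
        def __init__(self):
            super().__init__()
            self.eps = 1e-6  # plain float attribute: guarded by EQUALS_MATCH, not traced as an input

        def forward(self, x):
            variance = x.pow(2).mean(-1, keepdim=True)
            return x * torch.rsqrt(variance + self.eps)

    mod = RMSNormLike()
    compiled = torch.compile(mod)
    x = torch.randn(4, 64)
    compiled(x)     # first call compiles and installs guards, including one like L['self'].eps == 1e-06
    mod.eps = 1e-5  # the EQUALS_MATCH guard on eps now fails ...
    compiled(x)     # ... so this call triggers a recompile (run with TORCH_LOGS="recompiles" to see why)
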
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7], 140581767537392) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff, accessed_by=DictGetItemGuardAccessor(ff)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff, 140581767541040) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net, 140581767541280) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[7].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0], 140581767541232) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].proj, 140581767541328) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].proj.training, 140591004393440) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
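The net[0] guards just above describe diffusers' GELU activation module: an ID_MATCH pins the proj Linear, and an EQUALS_MATCH bakes approximate == 'tanh' into the compiled graph as a constant. A minimal sketch of such a module, reconstructed only from the two source lines quoted in the log (activations.py:83 and :88); the constructor and everything else here is an assumption, not the diffusers implementation:

import torch
import torch.nn as nn
import torch.nn.functional as F

class GELUSketch(nn.Module):
    def __init__(self, dim_in: int, dim_out: int, approximate: str = "tanh"):
        super().__init__()
        self.proj = nn.Linear(dim_in, dim_out)  # pinned by the ID_MATCH on net[0].proj
        self.approximate = approximate          # pinned by the EQUALS_MATCH == 'tanh'

    def gelu(self, gate):
        return F.gelu(gate, approximate=self.approximate)  # activations.py:83

    def forward(self, hidden_states):
        hidden_states = self.proj(hidden_states)           # activations.py:88
        return self.gelu(hidden_states)

Because approximate is a plain str attribute, Dynamo specializes on its value; reassigning it after compilation fails the EQUALS_MATCH and forces a recompile.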
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[1], 140581767541376) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[2], 140581767541424) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[2].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
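The TYPE_MATCH / LENGTH_CHECK / per-index ID_MATCH pattern on ff.net exists because Dynamo unrolls the "for module in self.net:" loop (attention.py:1200) at trace time, so the compiled graph is only valid while the container keeps the same type, length, and element identities. A self-contained sketch under that assumption (FeedForwardSketch is an invented stand-in, not the diffusers FeedForward class):

import torch
import torch.nn as nn

class FeedForwardSketch(nn.Module):
    def __init__(self, dim: int = 64):
        super().__init__()
        self.net = nn.ModuleList([nn.Linear(dim, dim), nn.Dropout(0.0), nn.Linear(dim, dim)])

    def forward(self, hidden_states):
        for module in self.net:       # the loop quoted at attention.py:1200
            hidden_states = module(hidden_states)
        return hidden_states

ff = FeedForwardSketch()
compiled = torch.compile(ff)
x = torch.randn(2, 64)
compiled(x)                           # first call: trace, install guards
ff.net.append(nn.ReLU())              # len(net) != 3, so LENGTH_CHECK now fails
compiled(x)                           # guard miss, Dynamo retraces with 4 modules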
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn, 140581767540272) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_k, 140581767540416) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_q, 140581767540512) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_v, 140581767540608) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.norm_k, 140581767540464) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.norm_k.weight, 140581785355344) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
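The norm_k guards (an eps EQUALS_MATCH against 1e-06 and an ID_MATCH on the weight Parameter) point at an RMS-norm style forward. A sketch assembled from the two quoted lines of normalization.py (428 and 430); the variance computation and the constructor are assumptions:

import torch
import torch.nn as nn

class RMSNormSketch(nn.Module):
    def __init__(self, dim: int, eps: float = 1e-6):
        super().__init__()
        self.eps = eps                               # pinned by the EQUALS_MATCH == 1e-06
        self.weight = nn.Parameter(torch.ones(dim))  # pinned by the ID_MATCH on .weight

    def forward(self, hidden_states):
        variance = hidden_states.pow(2).mean(-1, keepdim=True)            # assumed
        hidden_states = hidden_states * torch.rsqrt(variance + self.eps)  # normalization.py:428
        if self.weight is not None:                                       # normalization.py:430
            hidden_states = hidden_states * self.weight
        return hidden_states

Note that eps is a Python float, so Dynamo guards it with EQUALS_MATCH and bakes the value into the graph, whereas the weight tensor is tracked by object identity.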
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.norm_q, 140581767540368) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.norm_q.weight, 140581773232464) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out, 140581767540800) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[0], 140581767540848) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[0].training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[1], 140581767540896) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_k_proj, 140581767540656) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_k_proj.training, 140591004393440) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_q_proj, 140581767540752) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_q_proj.training, 140591004393440) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_v_proj, 140581767540704) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_v_proj.training, 140591004393440) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
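Taken together, the to_q/to_k/to_v and add_*_proj guards trace out the joint text-image attention that this processor implements. A rough sketch of that flow, assembled from the source lines quoted in the log (attention_processor.py:1716-1737); the head reshaping, the concatenation order of the two streams, and the attention call itself are assumptions, not the diffusers implementation:

import torch
import torch.nn.functional as F

def joint_attention_sketch(attn, hidden_states, encoder_hidden_states):
    batch_size = hidden_states.shape[0]

    # image-stream projections (attention_processor.py:1716-1718)
    query = attn.to_q(hidden_states)
    key = attn.to_k(hidden_states)
    value = attn.to_v(hidden_states)

    inner_dim = key.shape[-1]
    head_dim = inner_dim // attn.heads  # line 1721; attn.heads == 24 is guarded below

    def split_heads(t):
        return t.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2)

    query, key, value = (split_heads(t) for t in (query, key, value))

    # text-stream projections (lines 1735-1737), joined with the image stream
    extra_q = split_heads(attn.add_q_proj(encoder_hidden_states))
    extra_k = split_heads(attn.add_k_proj(encoder_hidden_states))
    extra_v = split_heads(attn.add_v_proj(encoder_hidden_states))
    query = torch.cat([extra_q, query], dim=2)
    key = torch.cat([extra_k, key], dim=2)
    value = torch.cat([extra_v, value], dim=2)

    out = F.scaled_dot_product_attention(query, key, value)
    # afterwards the two streams are split back apart and projected through
    # to_out[0]/to_out[1] and to_add_out (lines 1776-1779, guarded above)
    return out.transpose(1, 2).reshape(batch_size, -1, inner_dim)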
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_add_out, 140581767540944) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_add_out.training, 140591004393440) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.norm_added_k, 140581767541088) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.norm_added_k.weight, 140581765826096) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.norm_added_q, 140581767540992) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.norm_added_q.weight, 140581772706816) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.processor, 140581767540224) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
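In plain Python, the guard predicates repeated throughout this tree reduce to identity and dict-membership checks; the real TREE_GUARD_MANAGER evaluates them natively, but the semantics can be sketched as follows (check_obj_id and dict_contains are illustrative analogues, not the actual Dynamo helpers):

import torch.nn as nn

def check_obj_id(obj, expected_id):   # rough analogue of ___check_obj_id
    return id(obj) == expected_id

def dict_contains(key, d):            # rough analogue of ___dict_contains
    return key in d

mod = nn.Linear(8, 8)
frozen = id(mod)                      # what the guard table records at compile time

assert check_obj_id(mod, frozen)                   # ID_MATCH holds
assert not dict_contains("forward", mod.__dict__)  # DICT_CONTAINS holds

mod.forward = lambda x: x             # a per-instance override lands in __dict__...
assert dict_contains("forward", mod.__dict__)      # ...so this guard would now fail

This is why monkey-patching forward on a single module instance, or swapping a submodule object, invalidates the compiled graph and triggers a retrace, while in-place weight updates do not.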
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1, accessed_by=DictGetItemGuardAccessor(norm1)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1, 140581767539792) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.norm, 140581767539936) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.silu, 140581767539840) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.linear, 140581767539888) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm2, accessed_by=DictGetItemGuardAccessor(norm2)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm2, 140581767541136) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm2.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
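The norm1 children guarded above (silu, linear, norm, plus the identity guard on the emb attribute) outline an AdaLayerNormZero-style modulation. A sketch stitched together from the quoted lines normalization.py:135-139 and the five outputs named at transformer_flux.py:165; the 6-way chunk layout and the parameter-free LayerNorm are assumptions:

import torch
import torch.nn as nn

class AdaLayerNormZeroSketch(nn.Module):
    def __init__(self, embedding_dim: int):
        super().__init__()
        self.emb = None   # identity-pinned by the ID_MATCH above; None in this trace (assumption)
        self.silu = nn.SiLU()
        self.linear = nn.Linear(embedding_dim, 6 * embedding_dim)
        self.norm = nn.LayerNorm(embedding_dim, elementwise_affine=False, eps=1e-6)

    def forward(self, x, emb):
        emb = self.linear(self.silu(emb))                                 # normalization.py:137
        shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = emb.chunk(6, dim=1)
        x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # normalization.py:139
        return x, gate_msa, shift_mlp, scale_mlp, gate_mlp                # the five outputs at transformer_flux.py:165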
torch/_dynamo/guards.py:2148] [0/0] [__guards]
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net, 140581767541616) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[7].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0], 140581767541568) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].proj, 140581767541664) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].proj.training, 140591004393440) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[1], 140581767541760) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[2], 140581767541808) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[2].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
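The guard kinds repeated throughout this tree reduce to cheap predicates evaluated against the live module on every call of the compiled function. A rough sketch of their semantics, with illustrative helper names rather than the actual torch._dynamo internals (Dynamo evaluates compiled C++ equivalents of these checks):

    # Approximate meaning of the guard kinds in this dump; helper names are
    # illustrative, not Dynamo's own.
    def id_match(obj, expected_id: int) -> bool:
        # ID_MATCH / ___check_obj_id: same object identity as at trace time.
        return id(obj) == expected_id

    def type_match(obj, expected_type_id: int) -> bool:
        # TYPE_MATCH / ___check_type_id: exact class match, no subclasses.
        return id(type(obj)) == expected_type_id

    def length_check(seq, n: int) -> bool:
        # LENGTH_CHECK: container length unchanged (here: len(net) == 3).
        return len(seq) == n

    def equals_match(value, expected) -> bool:
        # EQUALS_MATCH: value equality (here: approximate == 'tanh').
        return value == expected

    def dict_not_contains(d: dict, key: str) -> bool:
        # DICT_CONTAINS (negated form): the instance __dict__ must *not*
        # shadow the class-level attribute (here: no per-instance 'forward').
        return key not in d

The ID_MATCH guards on the many `.training` attributes work because `True` and `False` are singletons; the identical id 140591004393440 on every one of them is the same boolean object each time, almost certainly `False` here since the model is being run for inference.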
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context, 140581767539984) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.norm, 140581767540176) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.silu, 140581767540080) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.linear, 140581767540128) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm2_context, 140581767541184) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
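The norm1_context guards above walk an AdaLayerNormZero-style module: the conditioning embedding passes through SiLU and a Linear, is chunked into shift/scale/gate vectors, and modulates a parameter-free LayerNorm (the source lines quoted from normalization.py:135-139). A minimal sketch of that computation, with an illustrative `dim` rather than the FLUX hidden size:

    import torch
    import torch.nn as nn

    class AdaLayerNormZeroSketch(nn.Module):
        # Sketch of the modulation quoted at normalization.py:137-139;
        # the 6-way chunk mirrors the returned gate/shift/scale tuple.
        def __init__(self, dim: int):
            super().__init__()
            self.silu = nn.SiLU()
            self.linear = nn.Linear(dim, 6 * dim)
            self.norm = nn.LayerNorm(dim, elementwise_affine=False, eps=1e-6)

        def forward(self, x: torch.Tensor, emb: torch.Tensor):
            emb = self.linear(self.silu(emb))
            shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = emb.chunk(6, dim=1)
            x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]
            return x, gate_msa, shift_mlp, scale_mlp, gate_mlp

Each submodule touched here (silu, linear, norm) appears above as its own GuardManager node, which is why a single modulation layer contributes a dozen guard lines.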
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | +- GuardManager: source=L['self'].transformer_blocks[8], accessed_by=GetItemGuardAccessor(8)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8], 140581767539456) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff, accessed_by=DictGetItemGuardAccessor(ff)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff, 140581767543104) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net, 140581767543344) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[8].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0], 140581767543296) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].proj, 140581767543392) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].proj.training, 140591004393440) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[1], 140581767543440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[2], 140581767543488) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[2].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn, accessed_by=DictGetItemGuardAccessor(attn)
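The ff/ff_context guards describe diffusers' FeedForward block: `net` is a list of exactly three entries (hence the LENGTH_CHECK == 3), a GELU projection with approximate='tanh' (hence the EQUALS_MATCH), a dropout, and the output Linear, applied in the plain loop quoted from attention.py:1200. A sketch under those assumptions, with illustrative dimensions:

    import torch
    import torch.nn as nn
    import torch.nn.functional as F

    class GELUProjSketch(nn.Module):
        # Sketch of the guarded net[0]: Linear proj followed by
        # tanh-approximated GELU (activations.py:83-88).
        def __init__(self, dim_in: int, dim_out: int, approximate: str = "tanh"):
            super().__init__()
            self.proj = nn.Linear(dim_in, dim_out)
            self.approximate = approximate

        def forward(self, hidden_states):
            hidden_states = self.proj(hidden_states)
            return F.gelu(hidden_states, approximate=self.approximate)

    class FeedForwardSketch(nn.Module):
        def __init__(self, dim: int, mult: int = 4):
            super().__init__()
            # Three entries, matching LENGTH_CHECK: len(net) == 3.
            self.net = nn.ModuleList([GELUProjSketch(dim, dim * mult),
                                      nn.Dropout(0.0),
                                      nn.Linear(dim * mult, dim)])

        def forward(self, hidden_states):
            for module in self.net:  # the guarded loop at attention.py:1200
                hidden_states = module(hidden_states)
            return hidden_states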
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn, 140581767542336) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_k, 140581767542480) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_q, 140581767542576) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_v, 140581767542672) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.norm_k, 140581767542528) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.norm_k.weight, 140581774129120) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.norm_q, 140581767542432) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
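norm_q/norm_k (and norm_added_q/norm_added_k below) are RMS norms over the per-head query/key vectors; the guards pin eps to 1e-06 and the weight parameter's identity. A sketch of the forward quoted at normalization.py:428-430 (upcast-to-float32 behavior assumed, matching common RMSNorm implementations):

    import torch
    import torch.nn as nn

    class RMSNormSketch(nn.Module):
        # eps matches the guarded value 1e-06; weight is optional, which is
        # why the guards include `if self.weight is not None`.
        def __init__(self, dim: int, eps: float = 1e-6, elementwise_affine: bool = True):
            super().__init__()
            self.eps = eps
            self.weight = nn.Parameter(torch.ones(dim)) if elementwise_affine else None

        def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
            input_dtype = hidden_states.dtype
            variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)
            hidden_states = hidden_states.to(torch.float32) * torch.rsqrt(variance + self.eps)
            if self.weight is not None:
                hidden_states = hidden_states * self.weight
            return hidden_states.to(input_dtype)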
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.norm_q.weight, 140581772718176) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out, 140581767542864) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[0], 140581767542912) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[0].training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[1], 140581767542960) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
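Every line in this tree is re-checked on each call of the compiled transformer, and any failure triggers a recompile. To regenerate a dump like this one, or to see which specific guard failed instead, the standard logging toggles are (sketch; `model` and `inputs` are placeholders, not names from this run):

    import torch

    # Equivalent to running with TORCH_LOGS="guards" in the environment.
    torch._logging.set_logs(guards=True)
    # torch._logging.set_logs(recompiles=True)  # print the failing guard instead

    compiled = torch.compile(model)  # `model` assumed defined elsewhere
    _ = compiled(*inputs)            # guard tree is printed once compilation finishes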
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj, 140581767542720) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj.training, 140591004393440) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_q_proj, 140581767542816) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_q_proj.training, 140591004393440) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_v_proj, 140581767542768) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_v_proj.training, 140591004393440) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_add_out, 140581767543008) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_add_out.training, 140591004393440) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.norm_added_k, 140581767543152) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.norm_added_k.weight, 140581772720576) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.norm_added_q, 140581767543056) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.norm_added_q.weight, 140581773245744) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
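Taken together, the attn guards for blocks[8] cover the joint text+image attention path of the quoted processor lines (attention_processor.py:1716-1779): separate q/k/v projections for image tokens, extra add_*_proj projections for text tokens, RMS norms on queries/keys, concatenation of the two streams, scaled-dot-product attention, then a split back out. A condensed sketch under those assumptions (rotary embeddings and masking omitted; with the guarded heads == 24 and a 3072-wide projection this gives head_dim = 128):

    import torch
    import torch.nn.functional as F

    def joint_attention_sketch(attn, hidden_states, encoder_hidden_states):
        # Sketch of the guarded attention path; `attn` is assumed to carry the
        # submodules named in the guards above.
        batch_size = hidden_states.shape[0]
        query = attn.to_q(hidden_states)
        key = attn.to_k(hidden_states)
        value = attn.to_v(hidden_states)

        inner_dim = key.shape[-1]
        head_dim = inner_dim // attn.heads  # the guarded division at :1721

        def split_heads(t):
            return t.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2)

        query, key, value = map(split_heads, (query, key, value))
        if attn.norm_q is not None:
            query = attn.norm_q(query)
        if attn.norm_k is not None:
            key = attn.norm_k(key)

        # Text tokens get their own projections, concatenated in front.
        eq = split_heads(attn.add_q_proj(encoder_hidden_states))
        ek = split_heads(attn.add_k_proj(encoder_hidden_states))
        ev = split_heads(attn.add_v_proj(encoder_hidden_states))
        if attn.norm_added_q is not None:
            eq = attn.norm_added_q(eq)
        if attn.norm_added_k is not None:
            ek = attn.norm_added_k(ek)
        query = torch.cat([eq, query], dim=2)
        key = torch.cat([ek, key], dim=2)
        value = torch.cat([ev, value], dim=2)

        out = F.scaled_dot_product_attention(query, key, value)
        out = out.transpose(1, 2).reshape(batch_size, -1, attn.heads * head_dim)

        ctx_len = encoder_hidden_states.shape[1]
        encoder_out, image_out = out[:, :ctx_len], out[:, ctx_len:]
        image_out = attn.to_out[1](attn.to_out[0](image_out))  # linear, then dropout
        encoder_out = attn.to_add_out(encoder_out)
        return image_out, encoder_out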
head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.processor, 140581767542288) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1, accessed_by=DictGetItemGuardAccessor(norm1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1, 140581767541856) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, 
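Note: the guards above pin down transformer_blocks[8].attn. EQUALS_MATCH specializes on plain Python values (attn.heads == 24, eps == 1e-06), ID_MATCH pins each submodule and its training flag to the exact objects seen at trace time, TYPE_MATCH + ID_MATCH pin the attention processor instance, and the DICT_CONTAINS checks assert that no instance-level forward was monkey-patched onto a module. If any check fails on a later call, Dynamo recompiles the frame. A minimal sketch of that behavior (toy module, not the FLUX transformer):

    import torch

    class Toy(torch.nn.Module):
        def __init__(self, heads: int = 24):
            super().__init__()
            self.heads = heads
            self.proj = torch.nn.Linear(8, 8)

        def forward(self, x):
            # Reading self.heads during tracing installs an EQUALS_MATCH
            # guard on it, analogous to `attn.heads == 24` above.
            return self.proj(x) / self.heads

    m = Toy()
    compiled = torch.compile(m)
    compiled(torch.randn(2, 8))  # first call: trace and install guards
    m.heads = 12                 # invalidates the EQUALS_MATCH guard
    compiled(torch.randn(2, 8))  # guard fails -> Dynamo retraces this frame
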
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1, accessed_by=DictGetItemGuardAccessor(norm1)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1, 140581767541856) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.norm, 140581767542000) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.silu, 140581767541904) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.linear, 140581767541952) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm2, accessed_by=DictGetItemGuardAccessor(norm2)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm2, 140581767543200) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm2.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context, 140581767543536) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net, 140581767543680) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[8].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0], 140581767543632) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].proj, 140581767543728) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].proj.training, 140591004393440) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[1], 140581767543824) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[2], 140581767543872) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[2].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
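Note: the ff_context guards spell out the shape Dynamo relies on to unroll `for module in self.net:`: an ID_MATCH on the container, a TYPE_MATCH on its ModuleList type, a LENGTH_CHECK fixing len(net) == 3, and an EQUALS_MATCH pinning net[0].approximate to 'tanh'. A sketch of the guarded layout, assuming diffusers' FeedForward built with activation_fn="gelu-approximate" (dim = 3072 is an assumption for FLUX: 24 heads x head_dim 128):

    import torch.nn as nn
    from diffusers.models.activations import GELU

    dim = 3072  # assumed FLUX inner dim
    net = nn.ModuleList([
        GELU(dim, 4 * dim, approximate="tanh"),  # net[0]: .proj, then tanh-approximated GELU
        nn.Dropout(0.0),                         # net[1]
        nn.Linear(4 * dim, dim),                 # net[2]
    ])

With the container's type and length guarded, Dynamo can inline all three submodule calls into a single graph instead of tracing a data-dependent loop.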
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context, 140581767542048) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.norm, 140581767542240) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.silu, 140581767542144) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.linear, 140581767542192) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm2_context, 140581767543248) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
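Note: transformer_blocks[8] is now fully guarded; the identical subtree shape repeats below for transformer_blocks[9], and likewise for every other entry of self.transformer_blocks, which is why guard dumps for large models run this long. A sketch of how such a dump is produced (log flags as in PyTorch 2.3/2.4; `pipe` is an assumed FluxPipeline, not defined in this log):

    # Shell: TORCH_LOGS="+dynamo,guards" python run_flux.py
    # or equivalently from Python:
    import logging
    import torch

    torch._logging.set_logs(dynamo=logging.DEBUG, guards=True)
    pipe.transformer = torch.compile(pipe.transformer)  # guards install on the first call
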
___dict_contains('forward', L['self'].transformer_blocks[9].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net, 140581769888384) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[9].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0], 140581769888336) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] 
[0/0] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].proj, 140581769888432) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].proj.training, 140591004393440) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | 
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[1], 140581769888480) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[2], 140581769888528) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[2].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[9].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn, 140581767544400) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_k, 140581767544544) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_q, 140581767544640) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_v, 140581767544736) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.norm_k, 140581767544592) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].attn.norm_k.__dict__) # forward_call = 
(self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.norm_k.weight, 140581765826736) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.norm_q, 140581767544496) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 
140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.norm_q.weight, 140581772717536) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out, 140581769887904) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0], accessed_by=GetItemGuardAccessor(0)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[0], 140581769887952) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[0].training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
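Reading these entries: ID_MATCH guards pin an attribute to the exact Python object seen at trace time, while EQUALS_MATCH guards pin a plain value. A rough Python equivalent of the two checks (a simplified sketch; the real guards are evaluated inside the compiled guard tree, and the integers logged here are the object ids Dynamo recorded at compile time):

    # Rough Python equivalents of the guards logged above (illustrative sketch).
    def check_obj_id(obj, expected_id: int) -> bool:
        # ID_MATCH: the attribute must still be the exact same object,
        # e.g. ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[0], ...)
        return id(obj) == expected_id

    def check_eps(norm, expected: float = 1e-06) -> bool:
        # EQUALS_MATCH: a value comparison, e.g. norm_q.eps == 1e-06 above.
        return norm.eps == expected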
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[1], accessed_by=GetItemGuardAccessor(1)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[1], 140581769888000) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_k_proj, 140581767544784) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_k_proj.training, 140591004393440) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_q_proj, 140581769887856) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_q_proj.training, 140591004393440) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_v_proj, 140581769887808) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_v_proj.training, 140591004393440) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_add_out, 140581769888048) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_add_out.training, 140591004393440) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.norm_added_k, 140581769888192) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.norm_added_k.weight, 140581765826976) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.norm_added_q, 140581769888096) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.norm_added_q.weight, 140581772714736) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.processor, 140581767544352) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
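The attention subtree for block 9 closes here. Note the value guards it installed: EQUALS_MATCH on attn.heads == 24 and the TYPE_MATCH/ID_MATCH pair on attn.processor, which pin the compiled graph to this exact attention configuration and processor instance. To reproduce a dump like this on a recent PyTorch 2.x build, guard logging can be enabled from the environment or from Python (a sketch; the exact knobs may vary by version, and run_flux.py is a placeholder for your own script):

    # Either: TORCH_LOGS="guards" python run_flux.py
    # Or, from Python, before the first call into the compiled model:
    import torch._logging
    torch._logging.set_logs(guards=True)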
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1, accessed_by=DictGetItemGuardAccessor(norm1)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1, 140581767543920) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.norm, 140581767544064) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.silu, 140581767543968) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.linear, 140581767544016) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm2, accessed_by=DictGetItemGuardAccessor(norm2)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm2, 140581769888240) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm2.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context, 140581769888576) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net, accessed_by=DictGetItemGuardAccessor(net)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net, 140581769888720) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[9].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
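The TYPE_MATCH and LENGTH_CHECK just above exist because the traced forward iterates `for module in self.net:` over an nn.ModuleList; the loop is unrolled at trace time, so the compiled graph is only valid for a container of the same type with exactly three entries. A minimal sketch of the invariant being asserted (the module choices and sizes below are illustrative stand-ins, inferred from the net[0].proj and approximate guards that follow, not the actual diffusers construction):

    import torch.nn as nn

    # Stand-in for ff_context.net: [projection/activation, Dropout, Linear].
    net = nn.ModuleList([nn.Linear(3072, 12288), nn.Dropout(0.0), nn.Linear(12288, 3072)])
    # What TYPE_MATCH and LENGTH_CHECK re-assert before reusing the graph:
    assert type(net) is nn.ModuleList
    assert len(net) == 3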
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0], accessed_by=GetItemGuardAccessor(0)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[0], 140581769888672) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[0].proj, 140581769888768) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[0].proj.training, 140591004393440) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[1], accessed_by=GetItemGuardAccessor(1)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[1], 140581769888864) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[2], accessed_by=GetItemGuardAccessor(2)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[2], 140581769888912) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[2].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
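The EQUALS_MATCH on net[0].approximate == 'tanh' above bakes the GELU variant into the generated kernel, so the guard re-checks it on every call. The guarded computation is the tanh-approximate GELU:

    import torch
    import torch.nn.functional as F

    x = torch.randn(4)
    y = F.gelu(x, approximate='tanh')  # what activations.py computes here
    # Closed form: 0.5 * x * (1 + tanh(sqrt(2/pi) * (x + 0.044715 * x**3)))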
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context, 140581767544112) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.norm, 140581767544304) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.silu, 140581767544208) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.linear, 140581767544256) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm2_context, 140581769888288) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
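Block 9's guard subtree ends here, and an identical one begins for transformer_blocks[10]: every block contributes the same set of guards, so the total guard count grows linearly with model depth. Because each submodule is ID_MATCHed, replacing a module object (as opposed to mutating its weights in place) invalidates the guards and forces a recompile. A minimal, hypothetical demo of that behavior (Toy is a stand-in, not diffusers code):

    import torch
    import torch.nn as nn

    class Toy(nn.Module):  # stand-in for one transformer block
        def __init__(self):
            super().__init__()
            self.proj = nn.Linear(8, 8)

        def forward(self, x):
            return self.proj(x)

    m = Toy()
    cm = torch.compile(m)
    x = torch.randn(2, 8)
    cm(x)                        # first call: trace, compile, install guards
    with torch.no_grad():
        m.proj.weight.mul_(0.5)  # same object id: ID_MATCH still passes
    cm(x)                        # reuses the compiled graph
    m.proj = nn.Linear(8, 8)     # new object id: ID_MATCH fails
    cm(x)                        # guard failure triggers recompilation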
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].transformer_blocks[10], accessed_by=GetItemGuardAccessor(10)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10], 140581767543584) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff, accessed_by=DictGetItemGuardAccessor(ff)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff, 140581769890208) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net, accessed_by=DictGetItemGuardAccessor(net)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net, 140581769890448) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[10].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0], accessed_by=GetItemGuardAccessor(0)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0], 140581769890400) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].proj, 140581769890496) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].proj.training, 140591004393440) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[1], accessed_by=GetItemGuardAccessor(1)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[1], 140581769890544) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2], accessed_by=GetItemGuardAccessor(2)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[2], 140581769890592) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[2].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn, accessed_by=DictGetItemGuardAccessor(attn)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn, 140581769889440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_k, 140581769889584) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager:
source=L['self'].transformer_blocks[10].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_q, 140581769889680) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_v, 140581769889776) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] 
[0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.norm_k, 140581769889632) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.norm_k.weight, 140581765992896) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | 
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.norm_q, 140581769889536) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.norm_q.weight, 140581765992976) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[10].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out, 140581769889968) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[0], 140581769890016) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[0].training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[1], 
accessed_by=GetItemGuardAccessor(1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[1], 140581769890064) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_k_proj, 140581769889824) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_k_proj.training, 140591004393440) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_q_proj, 140581769889920) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[10].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_q_proj.training, 140591004393440) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_v_proj, 140581769889872) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_v_proj.training, 140591004393440) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_add_out, 140581769890112) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_add_out.training, 140591004393440) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k) V0909 
14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.norm_added_k, 140581769890256) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.norm_added_k.weight, 140581765992736) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_k._forward_pre_hooks, 
accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.norm_added_q, 140581769890160) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.norm_added_q.weight, 140581765992816) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[10].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.processor, 140581769889392) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1, accessed_by=DictGetItemGuardAccessor(norm1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] 
[__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1, 140581769888960) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.norm, 140581769889104) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 
14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.silu, 140581769889008) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.linear, 140581769889056) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1._backward_pre_hooks, 
accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm2, accessed_by=DictGetItemGuardAccessor(norm2) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm2, 140581769890304) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm2.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context, 140581769890640) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net, 140581769890784) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[10].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0], 140581769890736) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].proj, 140581769890832) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].proj.training, 140591004393440) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[1], 140581769890928) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[2], 140581769890976) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[2].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context, 140581769889152) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = 
self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.norm, 140581769889344) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[10].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.silu, 140581769889248) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.linear, 140581769889296) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context._backward_pre_hooks, 
accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm2_context, 140581769890352) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].transformer_blocks[11], accessed_by=GetItemGuardAccessor(11) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11], 140581769888624) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[11].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff, accessed_by=DictGetItemGuardAccessor(ff) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff, 140581769892272) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net, 140581769892512) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[11].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0], 140581769892464) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].proj, 140581769892560) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].proj.training, 140591004393440) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[1], 140581769892608) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[11].ff.net[2], 140581769892656) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[2].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn, 140581769891504) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_k, 140581769891648) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_q, 140581769891744) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_v, 140581769891840) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 
14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.norm_k, 140581769891696) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.norm_k.weight, 140581765994416) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 
14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.norm_q, 140581769891600) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | 
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.norm_q.weight, 140581765994496) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out, 140581769892032) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[0], 140581769892080) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[0].training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[1], accessed_by=GetItemGuardAccessor(1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[1], 140581769892128) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_k_proj, 140581769891888) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_k_proj.training, 140591004393440) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[11].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_q_proj, 140581769891984) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_q_proj.training, 140591004393440) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_v_proj, 140581769891936) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_v_proj.training, 140591004393440) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_add_out, 140581769892176) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 
140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_add_out.training, 140591004393440) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.norm_added_k, 140581769892320) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.norm_added_k.weight, 140581765994256) # 
if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.norm_added_q, 140581769892224) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[11].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.norm_added_q.weight, 140581765994336) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.processor, 140581769891456) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn._backward_hooks, 
accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1, accessed_by=DictGetItemGuardAccessor(norm1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1, 140581769891024) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.norm, 140581769891168) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 
in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.silu, 140581769891072) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.linear, 140581769891120) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[11].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm2, accessed_by=DictGetItemGuardAccessor(norm2) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm2, 140581769892368) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm2.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context, 140581769892704) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net, 140581769892848) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[11].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0], 140581769892800) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].proj, 140581769892896) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].proj.training, 140591004393440) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[1], 140581769892992) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[2], 140581769893040) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[2].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context._forward_pre_hooks, 
accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context, 140581769891216) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.norm, 140581769891408) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[11].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.silu, 140581769891312) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.linear, 140581769891360) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[11].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm2_context, 140581769892416) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].transformer_blocks[12], accessed_by=GetItemGuardAccessor(12) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12], 
140581769890688) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff, accessed_by=DictGetItemGuardAccessor(ff) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff, 140581769894336) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] 
[0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net, 140581769894576) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[12].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0], 140581769894528) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[12].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].proj, 140581769894624) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].proj.training, 140591004393440) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[1], 140581769894672) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 
140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[2], 140581769894720) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[2].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn, 140581769893568) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] 
[__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_k, 140581769893712) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q, 140581769893808) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q.training, 140591004393440) # query = 
attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_v, 140581769893904) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.norm_k, 140581769893760) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # 
diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.norm_k.weight, 140581765995376) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.norm_q, 140581769893664) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 
140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.norm_q.weight, 140581765995696) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out, 140581769894096) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out.training, 140591004393440) # 
hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[0], 140581769894144) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[0].training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[1], accessed_by=GetItemGuardAccessor(1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[1], 140581769894192) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_k_proj, 140581769893952) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[12].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_k_proj.training, 140591004393440) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_q_proj, 140581769894048) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_q_proj.training, 140591004393440) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_v_proj, 140581769894000) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_v_proj.training, 140591004393440) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in 
__call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_add_out, 140581769894240) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_add_out.training, 140591004393440) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.norm_added_k, 140581769894384) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].attn.norm_added_k.eps == 1e-06 # 
hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.norm_added_k.weight, 140581773230944) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.norm_added_q, 140581769894288) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[12].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.norm_added_q.weight, 140581785355984) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.processor, 93831581524080) # 
attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.processor, 140581769893520) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1, accessed_by=DictGetItemGuardAccessor(norm1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1, 140581769893088) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.emb, 
140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.norm, 140581769893232) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.silu, 140581769893136) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.linear, 140581769893184) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm2, accessed_by=DictGetItemGuardAccessor(norm2) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm2, 140581769894432) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm2.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context, 140581769894768) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # 
diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net, 140581769894912) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[12].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 
140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0], 140581769894864) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].proj, 140581769894960) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].proj.training, 140591004393440) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, 
approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[1], 140581769895056) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[2], 140581769895104) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | 
| | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[2].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1_context, 140581769893280) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1_context.norm, 140581769893472) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1_context.silu, 140581769893376) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1_context.linear, 140581769893424) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] 
[__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1_context.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm2_context, 140581769894480) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12]._backward_hooks, 
accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].transformer_blocks[13], accessed_by=GetItemGuardAccessor(13) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13], 140581769892752) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff, accessed_by=DictGetItemGuardAccessor(ff) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff, 140581769896400) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net, 140581769896640) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[13].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[0], 140581769896592) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | 
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[0].proj, 140581769896688) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[0].proj.training, 140591004393440) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[13].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[1], 140581769896736) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[2], 140581769896784) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[2].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[13].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn, 140581769895632) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_k, 140581769895776) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:34:26.702000 140590996850496 
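The feed-forward guards above pin every attribute the compiled region reads from transformer_blocks[13].ff.net[0]: an ID_MATCH on the proj linear layer, ID_MATCHes on the training flags, and an EQUALS_MATCH on the string attribute approximate == 'tanh'. The quoted source lines (diffusers activations.py:83 and :88) are enough to reconstruct the module approximately; a minimal sketch, assuming proj is a plain nn.Linear and omitting any dtype handling the real diffusers GELU class may carry:

    import torch
    import torch.nn.functional as F
    from torch import nn

    class GELU(nn.Module):
        # Minimal reconstruction of the guarded module: only the attributes
        # visible in the guard tree (proj, approximate, training) are modeled.
        def __init__(self, dim_in: int, dim_out: int, approximate: str = "tanh"):
            super().__init__()
            self.proj = nn.Linear(dim_in, dim_out)  # ID_MATCH target
            self.approximate = approximate          # EQUALS_MATCH: == 'tanh'

        def gelu(self, gate: torch.Tensor) -> torch.Tensor:
            # activations.py:83: return F.gelu(gate, approximate=self.approximate)
            return F.gelu(gate, approximate=self.approximate)

        def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
            # activations.py:88: hidden_states = self.proj(hidden_states)
            hidden_states = self.proj(hidden_states)
            return self.gelu(hidden_states)

approximate is guarded by value rather than identity because it is a plain Python string; flipping it to 'none' after compilation would fail the EQUALS_MATCH and force a recompile.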
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_q, 140581769895872) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_v, 140581769895968) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.norm_k, 140581769895824) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | 
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.norm_k.weight, 140581765997456) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.norm_q, 140581769895728) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_q.__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.norm_q.weight, 140581765997536) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out) V0909 14:34:26.702000 140590996850496 
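norm_k and norm_q are guarded down to their eps floats (EQUALS_MATCH against 1e-06) and their weight parameters, and the quoted lines normalization.py:428 and :430 give the arithmetic that was traced. A minimal sketch of that forward, assuming the variance is the mean square over the last dimension (the usual RMSNorm formulation; the log itself only quotes the two lines):

    import torch
    from torch import nn

    class RMSNorm(nn.Module):
        # Sketch of the guarded norm. eps is a bare Python float, guarded by
        # EQUALS_MATCH; weight is an nn.Parameter (or None), guarded by ID_MATCH.
        def __init__(self, dim: int, eps: float = 1e-6, elementwise_affine: bool = True):
            super().__init__()
            self.eps = eps
            self.weight = nn.Parameter(torch.ones(dim)) if elementwise_affine else None

        def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
            variance = hidden_states.pow(2).mean(-1, keepdim=True)
            # normalization.py:428: hidden_states * torch.rsqrt(variance + self.eps)
            hidden_states = hidden_states * torch.rsqrt(variance + self.eps)
            # normalization.py:430: if self.weight is not None:
            if self.weight is not None:
                hidden_states = hidden_states * self.weight
            return hidden_states

eps gets an EQUALS_MATCH because a float has no stable identity to pin, while the Parameter object does, which is why weight gets an ID_MATCH instead.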
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out, 140581769896160) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[0], 140581769896208) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[0].training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[1], accessed_by=GetItemGuardAccessor(1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[1], 140581769896256) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | 
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_k_proj, 140581769896016) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_k_proj.training, 140591004393440) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_q_proj, 140581769896112) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_q_proj.training, 140591004393440) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_v_proj, 140581769896064) # 
encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_v_proj.training, 140591004393440) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_add_out, 140581769896304) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_add_out.training, 140591004393440) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.norm_added_k, 140581769896448) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 
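Nearly every module in the tree also carries a negative guard of the form not ___dict_contains('forward', <module>.__dict__), annotated with nn/modules/module.py:1556. That line chooses between self.forward and a per-instance override, so the compiled region is only valid while nobody has monkey-patched forward onto the instance. A small, hypothetical demonstration of the hazard this guards against (MyBlock and the lambda are illustrative, not from this log):

    import torch

    class MyBlock(torch.nn.Module):
        def forward(self, x):
            return x * 2

    block = MyBlock()
    compiled = torch.compile(block)
    compiled(torch.ones(4))  # first call compiles; the guards record that
                             # 'forward' is absent from block.__dict__

    # An instance-level override puts 'forward' into block.__dict__,
    # so the DICT_CONTAINS guard fails on the next call and Dynamo
    # must recompile instead of reusing the stale graph.
    block.forward = lambda x: x * 3
    compiled(torch.ones(4))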
140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.norm_added_k.weight, 140581765997296) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.norm_added_q, 140581769896352) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.norm_added_q.weight, 140581765997376) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 
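The leaf guard kinds repeated throughout this dump reduce to cheap Python checks. Roughly, and only as a paraphrase of their semantics (recent PyTorch evaluates them through compiled guard managers rather than Python code like this):

    def id_match(obj, expected_id):          # ID_MATCH / ___check_obj_id
        return id(obj) == expected_id

    def equals_match(value, expected):       # EQUALS_MATCH
        return value == expected

    def type_match(obj, expected_type_id):   # TYPE_MATCH / ___check_type_id
        return id(type(obj)) == expected_type_id

    def length_check(seq, expected_len):     # LENGTH_CHECK
        return len(seq) == expected_len

The large integers in the dump (140581..., 93831...) are these baked-in id() values, which is why a compiled artifact is only valid for the exact module objects it was traced with.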
140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.processor, 140581769895584) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1, accessed_by=DictGetItemGuardAccessor(norm1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1, 140581769895152) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | 
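attn.heads is pinned by value (EQUALS_MATCH against 24) because the quoted line attention_processor.py:1721 folds it into shape arithmetic rather than merely reading it. A sketch of that arithmetic; the inner_dim of 3072 is an assumption for illustration (24 heads times a head_dim of 128), since the log only pins heads itself:

    import torch

    batch, seq, heads, inner_dim = 1, 512, 24, 3072  # inner_dim assumed
    head_dim = inner_dim // heads                    # guarded line: 3072 // 24 = 128

    query = torch.randn(batch, seq, inner_dim)
    # split into heads: (B, S, inner_dim) -> (B, heads, S, head_dim)
    query = query.view(batch, -1, heads, head_dim).transpose(1, 2)
    print(query.shape)  # torch.Size([1, 24, 512, 128])

The processor gets both a TYPE_MATCH and an ID_MATCH: the type check covers the inspect.signature(...) lookup on processor.__call__ quoted at attention_processor.py:479, and the identity check covers the dispatch itself at line 490.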
| | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.norm, 140581769895296) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.silu, 140581769895200) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.silu.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.linear, 140581769895248) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm2, accessed_by=DictGetItemGuardAccessor(norm2) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm2, 140581769896496) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[13].norm2.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context, 140581769896832) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net, 140581769896976) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[13].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | 
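ff_context.net is guarded three ways at once: ID_MATCH on the container object, TYPE_MATCH on its class (an nn.ModuleList), and LENGTH_CHECK len(...) == 3. That combination is the signature of a loop Dynamo unrolled: for module in self.net: (attention.py:1200) becomes exactly three module calls in the traced graph, so a net of any other length could not reuse it. A sketch of the guarded shape; the members of net[1] and net[2] are an assumption here (the guard tree only identifies net[0] as the tanh-GELU projection, and diffusers' FeedForward conventionally follows it with a Dropout and an output Linear):

    from torch import nn

    class FeedForward(nn.Module):
        # Sketch: a 3-element ModuleList, matching LENGTH_CHECK == 3.
        def __init__(self, dim: int, mult: int = 4):
            super().__init__()
            inner = dim * mult
            self.net = nn.ModuleList([
                nn.Sequential(nn.Linear(dim, inner),          # net[0]: stand-in for the
                              nn.GELU(approximate="tanh")),   # guarded GELU-proj module
                nn.Dropout(0.0),                              # net[1]: assumed
                nn.Linear(inner, dim),                        # net[2]: assumed
            ])

        def forward(self, hidden_states):
            # attention.py:1200: for module in self.net:  (unrolled at trace time)
            for module in self.net:
                hidden_states = module(hidden_states)
            return hidden_states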
| | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0], 140581769896928) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].proj, 140581769897024) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[13].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].proj.training, 140591004393440) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[1], 140581769897120) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[13].ff_context.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[2], 140581769897168) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[2].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context, 140581769895344) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.norm, 140581769895536) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.silu, 140581769895440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.linear, 140581769895488) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm2_context, 140581769896544) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | +- GuardManager: source=L['self'].transformer_blocks[14], accessed_by=GetItemGuardAccessor(14)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14], 140581769894816) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff, accessed_by=DictGetItemGuardAccessor(ff)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff, 140581769898464) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net, 140581769898704) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[14].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0], 140581769898656) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].proj, 140581769898752) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].proj.training, 140591004393440) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[1], 140581769898800) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[2], 140581769898848) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[2].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn, 140581769897696) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_k, 140581769897840) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_q, 140581769897936) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_v, 140581769898032) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.norm_k, 140581769897888) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.norm_k.weight, 140581765999456) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.norm_q, 140581769897792) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.norm_q.weight, 140581772710976) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out, 140581769898224) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[0], 140581769898272) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[0].training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[1], 140581769898320) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_k_proj, 140581769898080) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_k_proj.training, 140591004393440) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_q_proj, 140581769898176) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_q_proj.training, 140591004393440) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_v_proj, 140581769898128) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_v_proj.training, 140591004393440) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_add_out, 140581769898368) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_add_out.training, 140591004393440) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.norm_added_k, 140581769898512) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.norm_added_k.weight, 140581765999296) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.norm_added_q, 140581769898416) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.norm_added_q.weight, 140581765999376) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.processor, 140581769897648) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1, accessed_by=DictGetItemGuardAccessor(norm1)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1, 140581769897216) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.norm, 140581769897360) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.silu, 140581769897264) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.linear, 140581769897312) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm2, accessed_by=DictGetItemGuardAccessor(norm2)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm2, 140581769898560) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm2.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context, 140581769898896) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net,
accessed_by=DictGetItemGuardAccessor(net) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net, 140581769899040) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[14].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0], 140581769898992) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0]._modules, 
accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].proj, 140581769899088) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].proj.training, 140591004393440) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[1], 140581769899184) # for module in self.net: # 
diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[2], 140581769899232) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[2].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context, 
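The guard kinds appearing in this dump (ID_MATCH, TYPE_MATCH, LENGTH_CHECK, EQUALS_MATCH, DICT_CONTAINS) all reduce to cheap Python predicates over the compiled frame's inputs. A minimal sketch of what each kind checks is below; the function names are illustrative only (the real ___check_obj_id / ___check_type_id helpers are closures Dynamo installs on the guard manager, not these definitions):

    # Illustrative sketch: what each guard kind above amounts to.
    def id_match(obj, expected_id):
        # ID_MATCH / ___check_obj_id: the exact same Python object is still bound here
        return id(obj) == expected_id

    def type_match(obj, expected_type_id):
        # TYPE_MATCH / ___check_type_id: the object's concrete type is unchanged
        return id(type(obj)) == expected_type_id

    def length_check(container, expected_len):
        # LENGTH_CHECK: e.g. len(ff_context.net) == 3
        return len(container) == expected_len

    def equals_match(value, expected):
        # EQUALS_MATCH: plain value equality, e.g. net[0].approximate == 'tanh'
        return value == expected

    def dict_not_contains(d, key):
        # DICT_CONTAINS (negated form used above): no per-instance 'forward'
        # override shadowing the class method in the module's __dict__
        return key not in d

If every predicate holds for the incoming L['self'], the cached compiled graph is reused; the first failure sends the frame back to Dynamo for recompilation.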
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context, 140581769897408) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.norm, 140581769897600) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.silu, 140581769897504) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.linear, 140581769897552) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm2_context, 140581769898608) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
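Every transformer block and each of its submodules is pinned by an ID_MATCH on the module object itself, so rebinding any submodule attribute after the first compile (as opposed to mutating its parameters in place) fails the guard and forces a recompile. A hedged toy reproduction of that behavior, with Block standing in for the FluxTransformerBlock guarded above:

    import torch

    class Block(torch.nn.Module):
        def __init__(self):
            super().__init__()
            self.norm1 = torch.nn.LayerNorm(8)

        def forward(self, x):
            return self.norm1(x)

    m = Block()
    cm = torch.compile(m)
    cm(torch.randn(2, 8))            # first call: compile and install ID_MATCH guards like the ones above
    m.norm1 = torch.nn.LayerNorm(8)  # rebind: id(m.norm1) changes, so ___check_obj_id now fails
    cm(torch.randn(2, 8))            # guard miss -> Dynamo recompiles this frame

In-place updates such as m.norm1.weight.data.mul_(2.0) keep the object identity stable and continue to hit the cache.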
| | | | | +- GuardManager: source=L['self'].transformer_blocks[15], accessed_by=GetItemGuardAccessor(15)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15], 140581769896880) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff, accessed_by=DictGetItemGuardAccessor(ff)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff, 140581769900528) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net, 140581769900768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[15].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0], 140581769900720) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].proj, 140581769900816) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].proj.training, 140591004393440) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[1], 140581769900864) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[2], 140581769900912) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[2].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
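For reference, a dump of this shape can be reproduced on any compiled module; a minimal sketch, assuming a PyTorch 2.x build whose logging registry exposes the guards component (the toy model here merely stands in for the Flux transformer):

    import torch

    # Same effect as launching the script with TORCH_LOGS="guards".
    torch._logging.set_logs(guards=True)

    model = torch.nn.Sequential(torch.nn.Linear(8, 8), torch.nn.GELU(approximate="tanh"))
    compiled = torch.compile(model)
    compiled(torch.randn(2, 8))  # the TREE_GUARD_MANAGER tree is printed after the first compile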
diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_k, 140581769899904) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_q, 140581769900000) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_v, 140581769900096) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.norm_k, 140581769899952) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.norm_k.weight, 
140581765900192) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.norm_q, 140581769899856) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_q._parameters, 
accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.norm_q.weight, 140581773243424) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out, 140581769900288) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[0], 140581769900336) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:34:26.702000 140590996850496 
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[0].training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[1], 140581769900384) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_k_proj, 140581769900144) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_k_proj.training, 140591004393440) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_q_proj, 140581769900240) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_q_proj.training, 140591004393440) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_v_proj, 140581769900192) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_v_proj.training, 140591004393440) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_add_out, 140581769900432) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_add_out.training, 140591004393440) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.norm_added_k, 140581769900576) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.norm_added_k.weight, 140581772718976) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.norm_added_q, 140581769900480) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.norm_added_q.weight, 140581772751824) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.processor, 140581769899712) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1, accessed_by=DictGetItemGuardAccessor(norm1)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1, 140581769899280) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.norm, 140581769899424) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.silu, 140581769899328) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.linear, 140581769899376) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm2, accessed_by=DictGetItemGuardAccessor(norm2)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm2, 140581769900624) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm2.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context, 140581769900960) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net, 140581769901104) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[15].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0], 140581769901056) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].proj, 140581769901152) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].proj.training, 140591004393440) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[1], 140581769901248) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[2], 140581769901296) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[2].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context, 140581769899472) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.norm, 140581769899664) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.silu, 140581769899568) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.linear, 140581769899616) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm2_context, 140581769900672) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | +- GuardManager: source=L['self'].transformer_blocks[16], accessed_by=GetItemGuardAccessor(16)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16], 140581769898944) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff, accessed_by=DictGetItemGuardAccessor(ff)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff, 140581769902592) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net, 140581769902832) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[16].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0], 140581769902784) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].proj, 140581769902880) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].proj.training, 140591004393440) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[1], 140581769902928) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[2], 140581769902976) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[2].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn, 140581769901824) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_k, 140581769901968) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_q, 140581769902064) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_v, 140581769902160) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.norm_k, 140581769902016) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.norm_k.weight, 140581772749424) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.norm_q, 140581769901920) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.norm_q.training, 140591004393440) # if
attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.norm_q.weight, 140581772748704) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out, 140581769902352) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | 
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[0], 140581769902400) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[0].training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[1], accessed_by=GetItemGuardAccessor(1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[1], 140581769902448) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_k_proj, 140581769902208) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 
14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_k_proj.training, 140591004393440) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_q_proj, 140581769902304) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_q_proj.training, 140591004393440) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_v_proj, 140581769902256) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_v_proj.training, 140591004393440) # 
encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_add_out, 140581769902496) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_add_out.training, 140591004393440) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.norm_added_k, 140581769902640) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.norm_added_k.weight, 140581772749264) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.norm_added_q, 140581769902544) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.norm_added_q.weight, 140581772749344) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- TYPE_MATCH: 
___check_type_id(L['self'].transformer_blocks[16].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.processor, 140581769901776) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1, accessed_by=DictGetItemGuardAccessor(norm1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1, 140581769901344) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | 
| | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.norm, 140581769901488) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.silu, 140581769901392) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.linear, 140581769901440) # emb = self.linear(self.silu(emb)) # 
diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm2, accessed_by=DictGetItemGuardAccessor(norm2) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm2, 140581769902688) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm2.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context, 140581769903024) # context_ff_output = 
self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net, 140581769903168) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[16].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0], 
accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0], 140581769903120) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].proj, 140581769903216) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].proj.training, 140591004393440) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: 
L['self'].transformer_blocks[16].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[1], 140581769903312) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[2], 140581769903360) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 
14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[2].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context, 140581769901536) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.emb, 140591004478624) # if self.emb is not None: # 
diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.norm, 140581769901728) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.silu, 140581769901632) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.linear, 140581769901680) # emb = self.linear(self.silu(emb)) # 
diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm2_context, 140581769902736) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].transformer_blocks[17], accessed_by=GetItemGuardAccessor(17) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17], 140581769901008) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff, accessed_by=DictGetItemGuardAccessor(ff) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff, 140581770183248) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net, 140581770183536) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[17].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0], 140581770183488) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else 
self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].proj, 140581770183584) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].proj.training, 140591004393440) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[1], 140581770183632) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[2], 140581770183680) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[2].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] 
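
The ff guards pin a three-entry net (LENGTH_CHECK len(...) == 3) iterated by `for module in self.net:` (attention.py:1200); its first entry carries both a proj linear (`hidden_states = self.proj(hidden_states)`, activations.py:88) and the EQUALS_MATCH on approximate == 'tanh' used by `F.gelu(gate, approximate=self.approximate)` (activations.py:83). A sketch of that structure, with hypothetical dim and mult values:

    import torch
    import torch.nn as nn
    import torch.nn.functional as F

    class GeluProjSketch(nn.Module):
        # Stands in for net[0] in the log: a linear proj followed by tanh-approximated GELU.
        def __init__(self, dim_in: int, dim_out: int, approximate: str = "tanh"):
            super().__init__()
            self.proj = nn.Linear(dim_in, dim_out)
            self.approximate = approximate  # guarded: EQUALS_MATCH == 'tanh'

        def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
            hidden_states = self.proj(hidden_states)                    # activations.py:88
            return F.gelu(hidden_states, approximate=self.approximate)  # activations.py:83

    dim, mult = 64, 4  # placeholders, not read from this log
    net = nn.ModuleList([GeluProjSketch(dim, dim * mult), nn.Dropout(0.0), nn.Linear(dim * mult, dim)])
    h = torch.randn(2, 16, dim)
    for module in net:  # attention.py:1200; LENGTH_CHECK pins len(net) == 3
        h = module(h)
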
[__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn, 140581769903888) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_k, 140581770182720) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 
14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_q, 140581769904032) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_v, 140581770182816) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.norm_k, 140581769904080) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.norm_k.weight, 140581772778192) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.norm_q, 140581769903984) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[17].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.norm_q.weight, 140581772741744) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out, 
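
The norm_k and norm_q guards reduce to two scalars per RMS-style norm: an EQUALS_MATCH on eps == 1e-06, consumed by `hidden_states = hidden_states * torch.rsqrt(variance + self.eps)` (normalization.py:428), and an ID_MATCH on weight for the `if self.weight is not None:` branch (normalization.py:430). A minimal sketch consistent with those two lines (dtype handling omitted; dim is a placeholder):

    import torch
    import torch.nn as nn

    class RMSNormSketch(nn.Module):
        def __init__(self, dim: int, eps: float = 1e-6, elementwise_affine: bool = True):
            super().__init__()
            self.eps = eps  # guarded: EQUALS_MATCH eps == 1e-06
            self.weight = nn.Parameter(torch.ones(dim)) if elementwise_affine else None

        def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
            variance = hidden_states.pow(2).mean(-1, keepdim=True)
            hidden_states = hidden_states * torch.rsqrt(variance + self.eps)  # normalization.py:428
            if self.weight is not None:                                       # normalization.py:430
                hidden_states = hidden_states * self.weight
            return hidden_states

    _ = RMSNormSketch(128)(torch.randn(2, 16, 128))
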
accessed_by=DictGetItemGuardAccessor(to_out) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out, 140581770183008) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[0], 140581770183056) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[0].training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[1], accessed_by=GetItemGuardAccessor(1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[1], 140581770183104) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 
14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_k_proj, 140581770182864) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_k_proj.training, 140591004393440) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_q_proj, 140581770182960) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_q_proj.training, 140591004393440) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[17].attn.add_v_proj, 140581770182912) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_v_proj.training, 140591004393440) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_add_out, 140581770183152) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_add_out.training, 140591004393440) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.norm_added_k, 140581770183296) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else 
self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.norm_added_k.weight, 140581772742144) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.norm_added_q, 140581770183200) # if attn.norm_added_q is not None: # 
diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.norm_added_q.weight, 140581772745504) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[17].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.processor, 140581769903840) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1, accessed_by=DictGetItemGuardAccessor(norm1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1, 140581769903408) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].norm1.__dict__) # forward_call = (self._slow_forward if 
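
Besides pinning every projection (to_q/to_k/to_v, the add_q_proj/add_k_proj/add_v_proj applied to encoder_hidden_states, to_out, and to_add_out), the attn guards freeze two scalars of the processor path: EQUALS_MATCH heads == 24, which feeds `head_dim = inner_dim // attn.heads` (attention_processor.py:1721), plus a TYPE_MATCH and ID_MATCH on attn.processor itself (lines 479 and 490). The arithmetic the heads guard protects is just the reshape below; inner_dim = 3072 is a hypothetical example, not a value read from this log:

    import torch

    def split_heads_sketch(x: torch.Tensor, heads: int = 24) -> torch.Tensor:
        batch, seq, inner_dim = x.shape
        head_dim = inner_dim // heads  # attention_processor.py:1721
        return x.view(batch, seq, heads, head_dim).transpose(1, 2)

    q = split_heads_sketch(torch.randn(1, 16, 3072))
    assert q.shape == (1, 24, 16, 128)  # 24 heads of width 128

If heads differed (say from a differently configured checkpoint), the EQUALS_MATCH would fail and the graph would be retraced with the new constant baked in.
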
torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.norm, 140581769903552) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.silu, 140581769903456) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 
14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.linear, 140581769903504) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm2, accessed_by=DictGetItemGuardAccessor(norm2) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm2, 140581770183344) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm2.__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm2.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context, 140581770183728) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net, 140581770183872) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[17].ff_context.net) == 3 # for module in self.net: # 
diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0], 140581770183824) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].proj, 140581770183920) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].proj.training, 140591004393440) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[1], 140581770184016) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in 
forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2], accessed_by=GetItemGuardAccessor(2)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[2], 140581770184064) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[2].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
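The ff_context guards above fully pin the feed-forward block's structure: a TYPE_MATCH and LENGTH_CHECK on a three-entry net, a net[0] with a proj submodule, and EQUALS_MATCH net[0].approximate == 'tanh'. A minimal sketch of a module that would produce exactly this guard shape, assuming the diffusers GELU/FeedForward layout quoted in the guard comments (attention.py:1200, activations.py:83/88); the dimensions and dropout value are illustrative, not read from the log:

import torch
import torch.nn as nn
import torch.nn.functional as F

class GELU(nn.Module):
    # net[0] in the guard tree: a Linear called `proj` plus a guarded
    # `approximate` attribute ('tanh' per the EQUALS_MATCH above).
    def __init__(self, dim_in, dim_out, approximate="tanh"):
        super().__init__()
        self.proj = nn.Linear(dim_in, dim_out)
        self.approximate = approximate

    def forward(self, hidden_states):
        hidden_states = self.proj(hidden_states)                    # activations.py:88
        return F.gelu(hidden_states, approximate=self.approximate)  # activations.py:83

class FeedForward(nn.Module):
    # len(self.net) == 3 matches the LENGTH_CHECK; net[1]/net[2] are the
    # remaining two guarded entries (assumed Dropout + Linear here).
    def __init__(self, dim, mult=4):
        super().__init__()
        inner_dim = dim * mult
        self.net = nn.ModuleList([GELU(dim, inner_dim), nn.Dropout(0.0), nn.Linear(inner_dim, dim)])

    def forward(self, hidden_states):
        for module in self.net:                                     # attention.py:1200
            hidden_states = module(hidden_states)
        return hidden_states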
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context, 140581769903600) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.norm, 140581769903792) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.silu, 140581769903696) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0]
[__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.linear, 140581769903744) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm2_context, 140581770183392) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[17].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].transformer_blocks[18], accessed_by=GetItemGuardAccessor(18) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18], 140581769903072) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[18].ff, accessed_by=DictGetItemGuardAccessor(ff) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff, 140581770185360) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net, 140581770185600) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[18].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0], 140581770185552) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].proj, 140581770185648) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].proj.training, 140591004393440) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: 
L['self'].transformer_blocks[18].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[1], 140581770185696) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[2], 140581770185744) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[2].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn, 140581770184592) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_k, 140581770184736) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_q, 140581770184832) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_v, 140581770184928) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
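The to_q/to_k/to_v guards pin the three projection layers the attention processor calls. A hedged sketch of the projection-and-head-split step described by the source lines quoted in the guards (attention_processor.py:1716-1721); `attn` stands for the guarded Attention module, the head count comes from the EQUALS_MATCH attn.heads == 24 later in the tree, and the reshape details are an assumption:

import torch

def project_qkv(attn, hidden_states):
    query = attn.to_q(hidden_states)        # attention_processor.py:1716
    key = attn.to_k(hidden_states)          # attention_processor.py:1717
    value = attn.to_v(hidden_states)        # attention_processor.py:1718
    inner_dim = key.shape[-1]
    head_dim = inner_dim // attn.heads      # attention_processor.py:1721
    batch_size = hidden_states.shape[0]
    # Split into (batch, heads, seq_len, head_dim) for scaled-dot-product attention.
    query = query.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2)
    key = key.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2)
    value = value.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2)
    return query, key, value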
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.norm_k, 140581770184784) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.norm_k.weight, 140581772745904) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_k._forward_pre_hooks,
accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.norm_q, 140581770184688) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.norm_q.weight, 140581772745664) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) 
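The norm_k/norm_q guards fix eps == 1e-06 and watch an optional weight parameter, and the quoted lines (normalization.py:428 and :430) are an RMS-style normalization. A minimal sketch consistent with those two lines; everything else here (the affine flag, dtype handling) is assumed rather than taken from the log:

import torch
import torch.nn as nn

class RMSNorm(nn.Module):
    def __init__(self, dim, eps=1e-6, elementwise_affine=True):
        super().__init__()
        self.eps = eps  # guarded above: EQUALS_MATCH eps == 1e-06
        self.weight = nn.Parameter(torch.ones(dim)) if elementwise_affine else None

    def forward(self, hidden_states):
        variance = hidden_states.pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + self.eps)  # normalization.py:428
        if self.weight is not None:                                       # normalization.py:430
            hidden_states = hidden_states * self.weight
        return hidden_states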
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out, 140581770185120) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[0], 140581770185168) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[0].training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] 
[__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[1], accessed_by=GetItemGuardAccessor(1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[1], 140581770185216) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_k_proj, 140581770184976) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_k_proj.training, 140591004393440) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_q_proj, 140581770185072) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 
140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_q_proj.training, 140591004393440) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_v_proj, 140581770185024) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_v_proj.training, 140591004393440) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_add_out, 140581770185264) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_add_out.training, 140591004393440) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[18].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.norm_added_k, 140581770185408) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.norm_added_k.weight, 140581773242944) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.norm_added_q, 140581770185312) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.norm_added_q.weight, 140581773236704) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.processor, 140581770184544) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
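The subtree above is the template that repeats for every submodule the traced forward touches: an ID_MATCH pins the module object itself and its .training flag, a DICT_CONTAINS check asserts that no per-instance 'forward' override exists, and plain Python values that were baked into the compiled graph get value guards (EQUALS_MATCH on norm_added_k.eps and attn.heads, TYPE_MATCH plus ID_MATCH on the attention processor). If any check fails on a later call, the cached graph is rejected and Dynamo recompiles. A minimal sketch of that mechanism, using a toy module rather than the Flux transformer (all names here are illustrative):

    import torch

    class Toy(torch.nn.Module):
        def __init__(self):
            super().__init__()
            self.heads = 24           # plain int, guarded by value (EQUALS_MATCH)

        def forward(self, x):
            return x * self.heads     # the value 24 is burned into the graph

    toy = Toy()
    compiled = torch.compile(toy)
    compiled(torch.randn(4))          # first call: trace, compile, install guards
    compiled(torch.randn(4))          # guards pass, the cached graph is reused
    toy.heads = 32                    # the EQUALS_MATCH guard now fails
    compiled(torch.randn(4))          # guard failure forces a recompile

Running a script like this with TORCH_LOGS="guards" (or torch._logging.set_logs(guards=True)) prints a TREE_GUARD_MANAGER dump of exactly this shape.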
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1, accessed_by=DictGetItemGuardAccessor(norm1)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1, 140581770184112) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.norm, 140581770184256) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.silu, 140581770184160) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.linear, 140581770184208) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm2, accessed_by=DictGetItemGuardAccessor(norm2)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm2, 140581770185456) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm2.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
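The norm1 guards quote the two lines of the adaptive layer norm that consume the guarded submodules: emb = self.linear(self.silu(emb)) and x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]. A compact sketch of that modulation pattern, condensed from the diffusers lines the guards cite (the 3072 width is illustrative, not read from this log):

    import torch
    import torch.nn as nn

    class AdaLNZeroSketch(nn.Module):
        # condensed from the lines quoted by the norm1 guards
        def __init__(self, dim=3072):
            super().__init__()
            self.silu = nn.SiLU()
            self.linear = nn.Linear(dim, 6 * dim)
            self.norm = nn.LayerNorm(dim, elementwise_affine=False, eps=1e-6)

        def forward(self, x, emb):
            emb = self.linear(self.silu(emb))  # normalization.py:137
            shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = emb.chunk(6, dim=1)
            x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # normalization.py:139
            return x, gate_msa, shift_mlp, scale_mlp, gate_mlp

Because silu, linear and norm are separate submodules, each one contributes its own ID_MATCH and training guard to the subtree above.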
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context, 140581770185792) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net, 140581770185936) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[18].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0], 140581770185888) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].proj, 140581770185984) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].proj.training, 140591004393440) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[1], 140581770186080) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[2], 140581770186128) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[2].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
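For ff_context the guards go beyond identity checks: the ModuleList gets a TYPE_MATCH on its type id and a LENGTH_CHECK (len == 3) because the traced loop for module in self.net: is unrolled at trace time, and net[0]'s approximate attribute gets an EQUALS_MATCH on the string 'tanh' since it is passed straight to F.gelu. A sketch of the three-entry net these guards describe, shaped like a feed-forward with a tanh-approximated GELU (the 3072/12288 widths are assumptions, not read from this log):

    import torch
    import torch.nn as nn
    import torch.nn.functional as F

    class GELUProj(nn.Module):
        # mirrors the net[0] guards: a Linear 'proj' plus an 'approximate' string
        def __init__(self, dim_in, dim_out, approximate="tanh"):
            super().__init__()
            self.proj = nn.Linear(dim_in, dim_out)  # activations.py:88
            self.approximate = approximate          # guarded by EQUALS_MATCH == 'tanh'

        def forward(self, x):
            return F.gelu(self.proj(x), approximate=self.approximate)  # activations.py:83

    # three entries, matching LENGTH_CHECK: len(...net) == 3
    net = nn.ModuleList([GELUProj(3072, 12288), nn.Dropout(0.0), nn.Linear(12288, 3072)])

Changing the list's length or swapping net[0] for a different activation would invalidate these guards and trigger a recompile.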
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context, 140581770184304) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.norm, 140581770184496) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.silu, 140581770184400) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.linear, 140581770184448) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm2_context, 140581770185504) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
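That closes the guard subtree for transformer_blocks[18], the last of the dual-stream blocks; the dump now starts over on single_transformer_blocks. The enumerate loop at transformer_flux.py:509 is unrolled during tracing, so every single-stream block gets its own copy of the same per-module guard pattern, which is why the full dump is so large. One way to get an aggregate view instead of reading the raw tree is torch._dynamo.explain, which reports installed guards alongside graph counts (field names such as out_guards may vary between PyTorch releases):

    import torch

    def probe(x):
        # stand-in for a model forward; any traceable function works
        return torch.sin(x) + 1

    explanation = torch._dynamo.explain(probe)(torch.randn(8))
    print(explanation.graph_count)       # number of captured graphs
    print(len(explanation.out_guards))   # number of installed guards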
| | | | +- GuardManager: source=L['self'].single_transformer_blocks, accessed_by=DictGetItemGuardAccessor(single_transformer_blocks)
| | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks, 140581770185840) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks.training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0], 140581770183776) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn, 140581770186656) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[0].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_k, 140581770186800) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q, 140581770186896) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_v, 140581770186944) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.norm_k, 140581770186848) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[0].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[0].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.norm_k.weight, 140581765130624) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
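norm_k (and norm_q just below) are RMS-style norms; their guards quote normalization.py:428 and :430. Note the asymmetry: eps is a plain Python float that enters the arithmetic, so it is pinned by value (EQUALS_MATCH == 1e-06), while weight here only feeds an is-not-None branch, so an ID_MATCH on the parameter object suffices and its values remain free to change without recompiling. A minimal sketch of the forward those two quoted lines come from (simplified; constructor arguments are illustrative):

    import torch
    import torch.nn as nn

    class RMSNormSketch(nn.Module):
        def __init__(self, dim, eps=1e-6, elementwise_affine=True):
            super().__init__()
            self.eps = eps   # pinned by EQUALS_MATCH in the guard tree
            self.weight = nn.Parameter(torch.ones(dim)) if elementwise_affine else None

        def forward(self, hidden_states):
            variance = hidden_states.float().pow(2).mean(-1, keepdim=True)
            hidden_states = hidden_states * torch.rsqrt(variance + self.eps)  # normalization.py:428
            if self.weight is not None:                                       # normalization.py:430
                hidden_states = hidden_states * self.weight
            return hidden_states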
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.norm_q, 140581770186704) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[0].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[0].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.norm_q.weight, 140581765886208) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[0].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[0].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.processor, 140581770186608) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.forward, accessed_by=FuncDefaultsGuardAccessor
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
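The FuncDefaultsGuardAccessor entry is worth a pause: inside the single-stream block the attention forward is called without encoder_hidden_states, so the traced branch batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else ... depends on the default value of that argument, and Dynamo pins attn.forward.__defaults__[0] to None. A similar guard should appear for any compiled module whose inner call relies on a default (a toy sketch, assuming the guard dump is enabled as above):

    import torch
    import torch.nn as nn

    class Inner(nn.Module):
        def forward(self, x, context=None):
            # the traced branch depends on the default of 'context'
            return x if context is None else x + context

    class Outer(nn.Module):
        def __init__(self):
            super().__init__()
            self.inner = Inner()

        def forward(self, x):
            return self.inner(x)   # 'context' silently takes its default

    compiled = torch.compile(Outer())
    compiled(torch.randn(4))  # expect a guard on Inner.forward.__defaults__[0],
                              # analogous to attn.forward.__defaults__[0] above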
self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.norm, 140581770186368) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.silu, 140581770186272) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.linear, 140581770186320) # emb = self.linear(self.silu(emb)) # 
diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].act_mlp, 140581770186512) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | 
| +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_mlp, 140581770186464) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_out, 140581770186560) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_out.training, 140591004393440) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1], 
accessed_by=GetItemGuardAccessor(1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1], 140581770186176) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[1].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn, 140581770187424) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[1].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 
14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_k, 140581770187568) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_q, 140581770187664) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_v, 140581770187712) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.norm_k, 140581770187616) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[1].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[1].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.norm_k.weight, 140581765129024) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.norm_q, 140581770187472) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[1].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[1].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.norm_q.weight, 140581783344112) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[1].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[1].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.processor, 140581770187376) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | 
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm, 140581770187040) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[1].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[1].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.norm, 140581770187184) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.silu, 140581770187088) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.linear, 140581770187136) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].act_mlp, 140581770187280) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_mlp, 140581770187232) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 
140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_out, 140581770187328) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_out.training, 140591004393440) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2], accessed_by=GetItemGuardAccessor(2) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2], 140581770186992) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn, 140581770188192) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[2].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_k, 140581770188336) # key = attn.to_k(hidden_states) # 
diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_q, 140581770188432) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_v, 140581770188480) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 
14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.norm_k, 140581770188384) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[2].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[2].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.norm_k.weight, 140581772721376) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_k._backward_hooks, 
accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.norm_q, 140581770188240) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[2].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[2].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.norm_q.weight, 140581765987072) # if 
self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[2].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[2].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.processor, 140581770188144) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.forward, accessed_by=FuncDefaultsGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
[... roughly 240 further guard lines elided: the tree repeats the pattern shown above for the remaining children of single_transformer_blocks[2] (the norm module with its norm/silu/linear children, act_mlp, proj_mlp, proj_out, and the block-level hook dicts), then, with per-block object ids, for all of single_transformer_blocks[3] and for single_transformer_blocks[4] down to norm.norm.__dict__; every block carries the same guards: ID_MATCH on each submodule and its .training flag, DICT_CONTAINS checks that 'forward' is not shadowed in any instance __dict__, EQUALS_MATCH attn.heads == 24 and norm_q/norm_k eps == 1e-06, TYPE_MATCH (93831581524080) plus ID_MATCH on each attn.processor, FuncDefaultsGuardAccessor checks on each attn.forward.__defaults__[0], and empty _forward/_backward hook and pre-hook managers throughout ...]
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- 
GuardManager: source=L['self'].single_transformer_blocks[4].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.silu, 140581770189392) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.linear, 140581770189440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 
14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].act_mlp, 140581770189584) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_mlp, 140581770189536) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_out, 140581770189632) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 
14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_out.training, 140591004393440) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5], accessed_by=GetItemGuardAccessor(5) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5], 140581770189296) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[5].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn, 
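The guard comments in the subtree above keep pointing at the same few source lines of the single transformer block (diffusers/src/diffusers/models/transformers/transformer_flux.py:88-98). Below is a minimal, self-contained sketch of what those guarded lines compute; the attention stand-in, the hidden size, the temb modulation head, and the residual wiring are assumptions for illustration, not the diffusers implementation.

import torch
import torch.nn as nn

# Sketch of the guarded block body (transformer_flux.py:88-98):
#   norm_hidden_states, gate = self.norm(hidden_states, emb=temb)
#   mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states))
#   attn_output = self.attn(...)
#   hidden_states = gate * self.proj_out(hidden_states)
class SingleBlockSketch(nn.Module):
    def __init__(self, dim: int = 3072, mlp_ratio: float = 4.0):
        super().__init__()
        mlp_dim = int(dim * mlp_ratio)
        self.norm = nn.LayerNorm(dim, elementwise_affine=False)       # stand-in for the guarded .norm
        self.to_mod = nn.Linear(dim, 2 * dim)                         # assumed temb -> (scale, gate)
        self.proj_mlp = nn.Linear(dim, mlp_dim)                       # guarded as .proj_mlp
        self.act_mlp = nn.GELU(approximate="tanh")                    # guarded as .act_mlp
        self.attn = nn.MultiheadAttention(dim, 24, batch_first=True)  # stand-in; guards pin heads == 24
        self.proj_out = nn.Linear(dim + mlp_dim, dim)                 # guarded as .proj_out

    def forward(self, hidden_states: torch.Tensor, temb: torch.Tensor) -> torch.Tensor:
        residual = hidden_states
        scale, gate = self.to_mod(temb).chunk(2, dim=-1)
        norm_hidden_states = self.norm(hidden_states) * (1 + scale[:, None])
        mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states))
        attn_output, _ = self.attn(norm_hidden_states, norm_hidden_states, norm_hidden_states)
        hidden_states = torch.cat([attn_output, mlp_hidden_states], dim=-1)
        return residual + gate[:, None] * self.proj_out(hidden_states)

Each ID_MATCH above pins the id() of one of these submodules, so replacing any of them with a new object (rather than mutating weights in place) would fail the guards and force a recompile.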
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5], accessed_by=GetItemGuardAccessor(5)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5], 140581770189296) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[5].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn, 140581770190496) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[5].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_k, 140581770190640) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_q, 140581770190736) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_v, 140581770190784) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.norm_k, 140581770190688) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[5].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[5].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.norm_k.weight, 140581772502624) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.norm_q, 140581770190544) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[5].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[5].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.norm_q.weight, 140581783352032) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
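Both the norm_k and norm_q subtrees pin eps == 1e-06 and an optional weight, and their guard comments quote diffusers/src/diffusers/models/normalization.py:428-430. A minimal RMSNorm sketch consistent with those two quoted lines follows; reducing over the last dimension and computing the variance in float32 are assumptions, and dtype round-tripping is elided.

import torch
import torch.nn as nn

class RMSNormSketch(nn.Module):
    # eps is what the EQUALS_MATCH guard checks; weight is the entry in
    # ._parameters that the ID_MATCH guard pins (it may be None).
    def __init__(self, dim: int, eps: float = 1e-6, elementwise_affine: bool = True):
        super().__init__()
        self.eps = eps
        if elementwise_affine:
            self.weight = nn.Parameter(torch.ones(dim))
        else:
            self.register_parameter("weight", None)

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        variance = hidden_states.float().pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + self.eps)  # normalization.py:428
        if self.weight is not None:                                       # normalization.py:430
            hidden_states = hidden_states * self.weight
        return hidden_states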
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[5].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[5].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.processor, 140581770190448) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.forward, accessed_by=FuncDefaultsGuardAccessor
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
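The attn subtree that just closed guards exactly the attributes used by the attention processor, and its comments quote diffusers/src/diffusers/models/attention_processor.py:1713-1729. Below is a skeleton assembled from those quoted lines; the head reshape layout and the final SDPA call are assumptions, and the rotary-embedding step of the real processor is elided.

import torch
import torch.nn.functional as F

def single_attn_call_sketch(attn, hidden_states, encoder_hidden_states=None):
    # attention_processor.py:1713
    batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape

    query = attn.to_q(hidden_states)  # :1716, guarded via attn.to_q
    key = attn.to_k(hidden_states)    # :1717, guarded via attn.to_k
    value = attn.to_v(hidden_states)  # :1718, guarded via attn.to_v

    inner_dim = key.shape[-1]
    head_dim = inner_dim // attn.heads  # :1721, guards pin attn.heads == 24

    query = query.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2)
    key = key.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2)
    value = value.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2)

    if attn.norm_q is not None:  # :1727, guarded via attn.norm_q (RMSNorm, eps == 1e-06)
        query = attn.norm_q(query)
    if attn.norm_k is not None:  # :1729, guarded via attn.norm_k
        key = attn.norm_k(key)

    hidden_states = F.scaled_dot_product_attention(query, key, value)
    return hidden_states.transpose(1, 2).reshape(batch_size, -1, inner_dim)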
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm, 140581770190112) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[5].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.norm, 140581770190256) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.silu, 140581770190160) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.linear, 140581770190208) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].act_mlp, 140581770190352) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_mlp, 140581770190304) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_out, 140581770190400) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_out.training, 140591004393440) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6], accessed_by=GetItemGuardAccessor(6)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6], 140581770190064) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[6].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn, 140581770191264) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[6].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_k, 140581770191408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_q, 140581770191504) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_v, 140581770191552) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.norm_k, 140581770191456) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[6].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[6].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.norm_k.weight, 140581773356832) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
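Block [6] repeats the same guard pattern as blocks [4] and [5]; only the object ids differ. For reference, a dump in this format can be reproduced on any compiled module (assuming PyTorch >= 2.1; the tiny model below is a stand-in, not the guarded Flux transformer):

import torch
import torch.nn as nn

# Enable the same log artifacts shown here; equivalent to launching the
# process with TORCH_LOGS="guards,recompiles".
torch._logging.set_logs(guards=True, recompiles=True)

model = nn.Sequential(nn.Linear(8, 8), nn.SiLU())
compiled = torch.compile(model)
compiled(torch.randn(2, 8))  # first call traces, then prints a TREE_GUARD_MANAGER like this one

Because every submodule and parameter here is pinned by ID_MATCH, mutating the module tree (swapping an attention processor, replacing Parameter objects rather than copying into them, or toggling .training) fails these guards and triggers a recompile on the next call.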
source=L['self'].single_transformer_blocks[6].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[6].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.norm_q.weight, 140581773349152) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[6].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.processor, 
accessed_by=DictGetItemGuardAccessor(processor) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[6].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.processor, 140581770191216) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm, 140581770190880) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[6].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.norm, 140581770191024) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.silu, 140581770190928) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.silu.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.linear, 140581770190976) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].act_mlp, 140581770191120) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] 
[__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_mlp, 140581770191072) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_out, 140581770191168) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_out.training, 140591004393440) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7], accessed_by=GetItemGuardAccessor(7) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7], 140581770190832) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[7].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn, 140581770192032) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[7].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else 
self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_k, 140581770192176) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_q, 140581770192272) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_v, 140581770192320) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.norm_k, 140581770192224) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[7].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[7].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 
14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.norm_k.weight, 140581765865344) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.norm_q, 140581770192080) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[7].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 
140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[7].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.norm_q.weight, 140581783351872) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[7].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[7].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.processor, 140581770191984) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm, 140581770191648) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[7].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | 
+- GuardManager: source=L['self'].single_transformer_blocks[7].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.norm, 140581770191792) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.silu, 140581770191696) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[7].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.linear, 140581770191744) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].act_mlp, 140581770191888) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # 
diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_mlp, 140581770191840) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_out, 140581770191936) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_out.training, 140591004393440) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 
140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8], accessed_by=GetItemGuardAccessor(8) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8], 140581770191600) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[8].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn, 140581770192800) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[8].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[8].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_k, 140581770192944) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_q, 140581770193040) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_v, 140581770193088) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.norm_k, 140581770192992) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[8].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[8].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.norm_k.weight, 140581766103104) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.norm_q, 140581770192848) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[8].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[8].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.norm_q.weight, 140581773350752) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[8].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[8].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.processor, 140581770192752) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.forward, accessed_by=FuncDefaultsGuardAccessor
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm, 140581770192416) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[8].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.norm, 140581770192560) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.silu, 140581770192464) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.linear, 140581770192512) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].act_mlp, 140581770192656) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_mlp, 140581770192608) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_out, 140581770192704) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_out.training, 140591004393440) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
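The guard subtree for single_transformer_blocks[8] ends here; the identical pattern repeats below for blocks [9] and [10], differing only in object ids. As a minimal, self-contained sketch of how a dump like this is produced and why it is dominated by ID_MATCH guards (assumptions: a recent PyTorch that prints the TREE_GUARD_MANAGER form shown here; the toy Block module is illustrative only, not the Flux model from this trace):

    import torch
    import torch.nn as nn

    torch._logging.set_logs(guards=True)  # same artifact as TORCH_LOGS="guards"

    class Block(nn.Module):
        def __init__(self, heads: int = 24):
            super().__init__()
            self.heads = heads             # plain int attribute -> EQUALS_MATCH guard
            self.to_q = nn.Linear(48, 48)  # submodule -> ID_MATCH guard on its object id
        def forward(self, x):
            return self.to_q(x) / self.heads

    m = Block().eval()
    cm = torch.compile(m)
    cm(torch.randn(2, 48))      # first call compiles the frame and prints its guard tree

    m.to_q = nn.Linear(48, 48)  # new object id, so the ID_MATCH guard on to_q fails
    cm(torch.randn(2, 48))      # next call recompiles and prints a fresh guard tree

Read under that model: every ___check_obj_id above pins the exact id() of a module, parameter, or singleton (140591004393440 recurs on every .training guard and is presumably the False singleton, the model being in eval mode); EQUALS_MATCH checks plain Python values such as heads == 24 and eps == 1e-06; and each DICT_CONTAINS asserts that no instance-level forward shadows the class method. Swapping any submodule or processor, or flipping train()/eval(), therefore invalidates the cached graph.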
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9], accessed_by=GetItemGuardAccessor(9)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9], 140581770192368) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[9].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn, 140581770193568) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[9].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_k, 140581770193712) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_q, 140581770193808) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_v, 140581770193856) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.norm_k, 140581770193760) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[9].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[9].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.norm_k.weight, 140581765867824) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.norm_q, 140581770193616) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[9].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[9].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.norm_q.weight, 140581773357792) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[9].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[9].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.processor, 140581770193520) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.forward, accessed_by=FuncDefaultsGuardAccessor
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm, 140581770193184) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[9].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.norm, 140581770193328) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.silu, 140581770193232) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.linear, 140581770193280) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].act_mlp, 140581770193424) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_mlp, 140581770193376) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out, 140581770193472) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out.training, 140591004393440) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10], accessed_by=GetItemGuardAccessor(10)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10], 140581770193136) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[10].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn, 140581770194336) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[10].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_k, 140581770194480) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_q, 140581770194576) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_v, 140581770194624) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.norm_k, 140581770194528) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[10].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[10].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.norm_k.weight, 140581766061632) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.norm_q, 140581770194384) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[10].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[10].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.norm_q.weight, 140581765892352)
# if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[10].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[10].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.processor, 140581770194288) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) 
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm, 140581770193952) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[10].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.norm, 140581770194096) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # 
diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.silu, 140581770194000) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.linear, 140581770194048) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward 
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].act_mlp, 140581770194192) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_mlp, 140581770194144) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[10].proj_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_out, 140581770194240) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_out.training, 140591004393440) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11], accessed_by=GetItemGuardAccessor(11) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11], 140581770193904) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[11].__dict__) # forward_call = (self._slow_forward if 
torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn, 140581770195152) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[11].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_k, 140581770195344) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_q, 140581770195440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_v, 140581770195488) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.norm_k, 140581770195392) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[11].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[11].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.norm_k.weight, 140581783350912) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.norm_q, 140581770195200) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[11].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[11].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.norm_q.weight, 140581783354592) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) 
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[11].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[11].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.processor, 140581770195104) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[11].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm, 140581770194720) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[11].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.norm, 140581770194864) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | 
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.silu, 140581770194768) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.linear, 140581770194816) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[11].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].act_mlp, 140581770195008) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_mlp, 140581770194912) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[11].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_out, 140581770195056) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_out.training, 140591004393440) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12], accessed_by=GetItemGuardAccessor(12) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12], 140581770194672) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[12].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] 
[0/0] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn, 140581770195968) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[12].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_k, 140581770196112) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_q, 140581770196208) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_v, 140581770196256) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.norm_k, 140581770196160) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[12].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[12].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[12].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.norm_k.weight, 140581783352912) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- 
GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.norm_q, 140581770196016) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[12].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[12].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.norm_q.weight, 140581783352432) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] 
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[12].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[12].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.processor, 140581770195920) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 
14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm, 140581770195584) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[12].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.norm, 140581770195728) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[12].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.silu, 140581770195632) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.linear, 140581770195680) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[12].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].act_mlp, 140581770195824) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_mlp, 140581770195776) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[12].proj_out, 140581770195872) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_out.training, 140591004393440) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13], accessed_by=GetItemGuardAccessor(13) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13], 140581770195536) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[13].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # 
diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn, 140581770196736) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[13].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_k, 140581770196880) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_q, 140581770196976) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_v, 140581770197024) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.norm_k, 140581770196928) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not 
___dict_contains('forward', L['self'].single_transformer_blocks[13].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[13].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.norm_k.weight, 140581772773872) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[13].attn.norm_q, 140581770196784) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[13].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[13].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.norm_q.weight, 140581772771872) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[13].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[13].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.processor, 140581770196688) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.forward.__defaults__[0], 140591004478624) # 
batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].norm, 140581770196352) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[13].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].norm.norm, 140581770196496) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].norm.silu, 140581770196400) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].norm.linear, 140581770196448) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].norm.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[13].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].act_mlp, 140581770196592) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_mlp, 140581770196544) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_out, 140581770196640) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | 
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_out.training, 140591004393440) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14], accessed_by=GetItemGuardAccessor(14) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14], 140581770196304) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[14].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn, 140581770197504) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[14].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_k, 140581770197648) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_q, 140581770197744) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_v, 140581770197792) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.norm_k, 140581770197696) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[14].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[14].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.norm_k.weight, 140581772772272) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.norm_q, 140581770197552) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[14].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[14].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.norm_q.weight, 140581771722112) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
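The EQUALS_MATCH guards on norm_k.eps and norm_q.eps arise because eps is a plain Python float read inside RMSNorm.forward (normalization.py:428), so Dynamo bakes the value into the compiled graph as a constant and guards on equality. A minimal sketch of the guarded computation; the class below is illustrative, not the diffusers implementation:

import torch

# Minimal sketch of the RMSNorm step the eps guards refer to.
class TinyRMSNorm(torch.nn.Module):
    def __init__(self, dim: int, eps: float = 1e-6):
        super().__init__()
        self.eps = eps  # plain Python float -> guarded with EQUALS_MATCH
        self.weight = torch.nn.Parameter(torch.ones(dim))

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        variance = hidden_states.float().pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + self.eps)
        return hidden_states * self.weight

# Because eps is compiled in as a constant, mutating module.eps after
# compilation would fail the EQUALS_MATCH guard and force a recompile.
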
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[14].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[14].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.processor, 140581770197456) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.forward, accessed_by=FuncDefaultsGuardAccessor
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
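The FuncDefaultsGuardAccessor entry guards forward.__defaults__[0] because the traced branch at attention_processor.py:1713 depends on the default of encoder_hidden_states, most plausibly None here given the `is None` test. A sketch of why the default itself must be guarded; the signature below is an illustrative stand-in, not the diffusers Attention.forward:

# Sketch: the compiled branch depends on the *default* of a parameter,
# so Dynamo ID-matches the slot in the function's __defaults__ tuple.
def forward(hidden_states, encoder_hidden_states=None):
    if encoder_hidden_states is None:
        return hidden_states.shape[0]
    return encoder_hidden_states.shape[0]

print(forward.__defaults__)   # (None,) -> guarded via GetItemGuardAccessor(0)

# Rebinding the default changes __defaults__[0]; the guard fails and the
# `is None` branch can no longer be assumed, forcing a recompile:
forward.__defaults__ = (0,)
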
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm, 140581770197120) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[14].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.norm, 140581770197264) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.silu, 140581770197168) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.linear, 140581770197216) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].act_mlp, 140581770197360) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_mlp, 140581770197312) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_out, 140581770197408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_out.training, 140591004393440) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
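The norm.silu, norm.linear, and norm.norm guards above all trace through the AdaLayerNorm-style modulation at normalization.py:169 and 171. A minimal sketch of that chain, mirroring diffusers' AdaLayerNormZeroSingle in spirit; the dimensions and three-way chunking below are assumptions, not read from this log:

import torch

# Sketch of the silu -> linear -> modulate chain the norm.* guards walk.
class TinyAdaNormZeroSingle(torch.nn.Module):
    def __init__(self, dim: int):
        super().__init__()
        self.silu = torch.nn.SiLU()
        self.linear = torch.nn.Linear(dim, 3 * dim)
        self.norm = torch.nn.LayerNorm(dim, elementwise_affine=False, eps=1e-6)

    def forward(self, x: torch.Tensor, emb: torch.Tensor):
        emb = self.linear(self.silu(emb))
        shift_msa, scale_msa, gate_msa = emb.chunk(3, dim=1)
        x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]
        return x, gate_msa  # matches `norm_hidden_states, gate = self.norm(...)`
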
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15], accessed_by=GetItemGuardAccessor(15)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15], 140581770197072) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[15].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn, accessed_by=DictGetItemGuardAccessor(attn)
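Dumps in this format come from Dynamo's "guards" logging artifact. A small self-contained sketch that produces an analogous, much shorter TREE_GUARD_MANAGER dump, assuming PyTorch 2.x where torch._logging.set_logs accepts the guards artifact (equivalent to running with TORCH_LOGS="guards"):

import torch

# Enable the guard dump before compiling.
torch._logging.set_logs(guards=True)

class Toy(torch.nn.Module):
    def __init__(self):
        super().__init__()
        self.proj = torch.nn.Linear(8, 8)

    def forward(self, x):
        return self.proj(x)

compiled = torch.compile(Toy())
compiled(torch.randn(2, 8))  # first call compiles frame [0/0] and prints guards
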
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn, 140581770198272) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[15].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_k, 140581770198416) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_q, 140581770198512) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_v, 140581770198560) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.norm_k, 140581770198464) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[15].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[15].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.norm_k.weight, 140581772775472) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.norm_q, 140581770198320) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[15].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[15].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.norm_q.weight, 140581772744304) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[15].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[15].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.processor, 140581770198224) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.forward, accessed_by=FuncDefaultsGuardAccessor
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
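The processor is guarded twice: TYPE_MATCH, because its class is inspected via inspect.signature at attention_processor.py:479, and ID_MATCH, because the object itself is called at line 490. Swapping the processor object therefore invalidates the compiled graph. A sketch; the checkpoint id and the FluxSingleAttnProcessor2_0 class are assumptions, since this log names neither:

from diffusers import FluxTransformer2DModel
from diffusers.models.attention_processor import FluxSingleAttnProcessor2_0

# Placeholder checkpoint, not taken from this log.
model = FluxTransformer2DModel.from_pretrained(
    "black-forest-labs/FLUX.1-dev", subfolder="transformer"
)
attn = model.single_transformer_blocks[15].attn
attn.set_processor(FluxSingleAttnProcessor2_0())
# The new processor has a different id(), so the ID_MATCH guard at
# attention_processor.py:490 fails and Dynamo recompiles on the next call.
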
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm, 140581770197888) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[15].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.norm, 140581770198032) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.silu, 140581770197936) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.linear, 140581770197984) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].act_mlp, 140581770198128) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_mlp, 140581770198080) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_out, 140581770198176) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_out.training, 140591004393440) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
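Every module in the tree also carries GuardManagers for its hook dictionaries (_forward_hooks, _backward_hooks, and the pre-hook variants), since nn.Module._call_impl consults them on every call. Registering a hook after compilation mutates that guarded state, so the next call recompiles. A minimal sketch:

import torch

# Sketch: hook dicts are guarded, so attaching a hook to an already-compiled
# module changes guarded state and triggers a recompile on the next call.
lin = torch.nn.Linear(4, 4)
compiled = torch.compile(lin)
compiled(torch.randn(1, 4))             # compile #1; guards cover _forward_hooks

lin.register_forward_hook(lambda m, i, o: o)  # mutates lin._forward_hooks
compiled(torch.randn(1, 4))             # guard check fails -> compile #2
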
self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16], accessed_by=GetItemGuardAccessor(16) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16], 140581770197840) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[16].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn, 140581770772544) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- 
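Every entry in the loop over self.single_transformer_blocks gets the same guard subtree as block [15] above: an ID_MATCH pinning each submodule to a fixed object id, a matching ID_MATCH on its .training flag (the repeated id 140591004393440 is plausibly the interned False singleton, since the model is traced in eval mode), and GuardManager nodes over the hook dicts. As a minimal sketch of how a dump like this is produced — assuming a PyTorch 2.x build where the guards logging artifact is exposed via torch._logging.set_logs (equivalently, TORCH_LOGS="guards" in the environment); the toy module below is hypothetical:

    import torch

    # Enable the [__guards] artifact so the first compiled call prints a
    # TREE_GUARD_MANAGER dump like the one in this log.
    torch._logging.set_logs(guards=True)

    class ToyBlock(torch.nn.Module):
        def __init__(self):
            super().__init__()
            self.norm = torch.nn.LayerNorm(8)
            self.proj_out = torch.nn.Linear(8, 8)

        def forward(self, x):
            # Each self.<submodule> access traced here becomes an ID_MATCH
            # guard on the submodule object and on its .training flag.
            return self.proj_out(self.norm(x))

    compiled = torch.compile(ToyBlock().eval())
    with torch.no_grad():
        compiled(torch.randn(2, 8))  # first call compiles and logs the guard tree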
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16], accessed_by=GetItemGuardAccessor(16)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16], 140581770197840) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[16].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn, 140581770772544) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[16].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_k, 140581770772688) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_q, 140581770772784) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_v, 140581770772832) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.norm_k, 140581770772736) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[16].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[16].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.norm_k.weight, 140581772777712) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.norm_q, 140581770772592) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[16].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[16].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.norm_q.weight, 140581772780512) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[16].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[16].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.processor, 140581770198992) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.forward, accessed_by=FuncDefaultsGuardAccessor
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm, 140581770198656) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[16].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.norm, 140581770198800) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.silu, 140581770198704) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.linear, 140581770198752) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].act_mlp, 140581770198896) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_mlp, 140581770198848) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_out, 140581770198944) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_out.training, 140591004393440) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
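Beyond module identity, block [16] shows the scalar guards Dynamo installs for plain Python attributes read during tracing: EQUALS_MATCH on the norm_q/norm_k .eps == 1e-06 and on attn.heads == 24, plus a TYPE_MATCH/ID_MATCH pair pinning attn.processor to one concrete processor instance. The compiled graph is specialized to those exact values, so mutating any of them invalidates the cache entry. A minimal sketch of that effect, using a hypothetical RMSNorm-like module (not the diffusers class, though the formula mirrors the guarded normalization.py:428 line):

    import torch

    class ToyRMSNorm(torch.nn.Module):
        def __init__(self, dim, eps=1e-6):
            super().__init__()
            self.eps = eps  # plain Python float -> EQUALS_MATCH guard
            self.weight = torch.nn.Parameter(torch.ones(dim))

        def forward(self, hidden_states):
            variance = hidden_states.pow(2).mean(-1, keepdim=True)
            # Reading self.eps during tracing burns 1e-06 into the graph
            # and installs `EQUALS_MATCH: ... .eps == 1e-06`.
            hidden_states = hidden_states * torch.rsqrt(variance + self.eps)
            return hidden_states * self.weight

    norm = ToyRMSNorm(64)
    compiled = torch.compile(norm)
    x = torch.randn(2, 64)
    compiled(x)      # compiles; guards eps == 1e-06
    norm.eps = 1e-5  # falsifies the EQUALS_MATCH guard ...
    compiled(x)      # ... so this call triggers a recompile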
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17], accessed_by=GetItemGuardAccessor(17)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17], 140581770198608) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[17].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn, 140581770773312) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[17].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_k, 140581770773456) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_q, 140581770773552) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_v, 140581770773600) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.norm_k, 140581770773504) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[17].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[17].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.norm_k.weight, 140581772714416) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.norm_q, 140581770773360) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[17].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[17].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.norm_q.weight, 140581765134464) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[17].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[17].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.processor, 140581770773264) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.forward, accessed_by=FuncDefaultsGuardAccessor
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm, 140581770772928) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[17].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.norm, 140581770773072) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.silu, 140581770772976) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.linear, 140581770773024) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].act_mlp, 140581770773168) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].proj_mlp, 140581770773120) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].proj_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].proj_out, 140581770773216) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].proj_out.training, 140591004393440) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
[__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18], accessed_by=GetItemGuardAccessor(18) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18], 140581770772880) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[18].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn, 140581770774080) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[18].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.training, 
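Note: the subtree above closes out single_transformer_blocks[17]; the same guard shape repeats for every block in the list, with only the index and the CPython object ids changing. Each guard kind shown here reduces to a cheap Python-level check. The sketch below is illustrative only: guards_still_hold is a made-up name, not Dynamo's generated checker, and the constants are sample values copied from this log.

# Roughly what the guard kinds in this tree verify, in plain Python:
#   ID_MATCH / ___check_obj_id   -> object identity is unchanged
#   EQUALS_MATCH                 -> a scalar attribute still compares equal
#   DICT_CONTAINS (negated)      -> no per-instance 'forward' override appeared
def guards_still_hold(block) -> bool:
    return (
        id(block) == 140581770772880                # ID_MATCH on the module itself
        and id(block.training) == 140591004393440   # ID_MATCH on the True/False singleton
        and block.attn.heads == 24                  # EQUALS_MATCH on a plain int
        and block.attn.norm_k.eps == 1e-06          # EQUALS_MATCH on a float
        and "forward" not in block.__dict__         # not ___dict_contains('forward', ...)
    )

If any such check fails on a later call, the cached graph guarded by it is rejected and Dynamo falls back, possibly recompiling.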
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18], accessed_by=GetItemGuardAccessor(18)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18], 140581770772880) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[18].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn, accessed_by=DictGetItemGuardAccessor(attn)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn, 140581770774080) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[18].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.to_k, 140581770774224) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.to_q, 140581770774320) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.to_v, 140581770774368) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.norm_k, 140581770774272) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[18].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[18].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.norm_k.weight, 140581783349232) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.norm_q, 140581770774128) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[18].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[18].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.norm_q.weight, 140581772716976) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[18].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[18].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.processor, 140581770774032) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.forward, accessed_by=FuncDefaultsGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm, accessed_by=DictGetItemGuardAccessor(norm)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm, 140581770773696) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[18].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.norm, 140581770773840) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.silu, 140581770773744) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.linear, 140581770773792) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].act_mlp, 140581770773936) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_mlp, 140581770773888) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_out, 140581770773984) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_out.training, 140591004393440) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
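The dump continues in the same pattern for the remaining blocks. For reference, output like the above comes from the guard/recompile logging artifacts; a minimal way to switch them on (or to study the output on a small model) is sketched below, assuming a recent PyTorch 2.x. The two-layer model is a hypothetical stand-in for the FLUX transformer in this trace:

import torch

# Equivalent to running with TORCH_LOGS="guards,recompiles" in the environment.
torch._logging.set_logs(guards=True, recompiles=True)

model = torch.nn.Sequential(torch.nn.Linear(8, 8), torch.nn.SiLU()).eval()
compiled = torch.compile(model)

with torch.no_grad():
    compiled(torch.randn(2, 8))  # first call compiles and prints a GUARDS tree
    compiled(torch.randn(2, 8))  # same shapes/objects: guards pass, graph is reused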
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19], accessed_by=GetItemGuardAccessor(19)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19], 140581770773648) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[19].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn, accessed_by=DictGetItemGuardAccessor(attn)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn, 140581770774848) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[19].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_k, 140581770774992) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_q, 140581770775088) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_v, 140581770775136) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.norm_k, 140581770775040) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[19].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[19].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.norm_k.weight, 140581772719056) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.norm_q, 140581770774896) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[19].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[19].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.norm_q.weight, 140581765885248) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[19].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[19].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.processor, 140581770774800) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.forward, accessed_by=FuncDefaultsGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm, accessed_by=DictGetItemGuardAccessor(norm)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm, 140581770774464) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[19].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.norm, 140581770774608) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.silu, 140581770774512) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.linear, 140581770774560) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].act_mlp, 140581770774704) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_mlp, 140581770774656) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_out, 140581770774752) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_out.training, 140591004393440) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_out, 140581770774752) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_out.training, 140591004393440) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20], accessed_by=GetItemGuardAccessor(20) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[20], 140581770774416) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[20].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn, 140581770775616) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[20].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[20].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_k, 140581770775760) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_q, 140581770775856) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_v, 140581770775904) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.norm_k, 140581770775808) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[20].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[20].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.norm_k.weight, 140581773351712) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.norm_q, 140581770775664) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[20].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[20].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.norm_q.weight, 140581772774352) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[20].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[20].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.processor, 140581770775568) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[20].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm, 140581770775232) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[20].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[20].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.norm, 140581770775376) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.silu, 140581770775280) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.linear, 140581770775328) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[20].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].act_mlp, 140581770775472) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_mlp, 140581770775424) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[20].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_out, 140581770775520) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_out.training, 140591004393440) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21], accessed_by=GetItemGuardAccessor(21) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21], 140581770775184) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 
14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[21].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn, 140581770776384) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[21].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_k, 140581770776528) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_q, 140581770776624) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_v, 140581770776672) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_v.training, 
140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.norm_k, 140581770776576) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[21].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[21].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.norm_k.weight, 140581772774032) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | 
| | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.norm_q, 140581770776432) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[21].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[21].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.norm_q.weight, 140581772782992) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[21].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[21].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.processor, 140581770776336) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.forward, accessed_by=FuncDefaultsGuardAccessor
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm, 140581770776000) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[21].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.norm, 140581770776144) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.silu, 140581770776048) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.linear, 140581770776096) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].act_mlp, 140581770776240) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_mlp, 140581770776192) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_out, 140581770776288) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_out.training, 140591004393440) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
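The subtree for single_transformer_blocks[21] ends here; the same pattern repeats for each remaining block: ID_MATCH pins every submodule and its `training` flag to a concrete object id, EQUALS_MATCH covers plain Python attributes (`attn.heads == 24`, `eps == 1e-06`), TYPE_MATCH plus ID_MATCH pin the attention processor, and the four hook dictionaries each get their own GuardManager. A minimal sketch, assuming PyTorch 2.x with the TORCH_LOGS machinery available, of reproducing this kind of guard dump on a toy module (the FLUX transformer is not required to see the same guard categories; `Toy` is purely illustrative):

    import torch

    # Same effect as running with TORCH_LOGS="guards": the TREE_GUARD_MANAGER
    # dump is printed once the first compiled call installs its guards.
    torch._logging.set_logs(guards=True)

    class Toy(torch.nn.Module):
        def __init__(self):
            super().__init__()
            self.heads = 24                    # plain int attribute -> EQUALS_MATCH
            self.proj = torch.nn.Linear(8, 8)  # submodule -> ID_MATCH on object id

        def forward(self, x):
            return self.proj(x) / self.heads

    toy = torch.compile(Toy())
    toy(torch.randn(2, 8))  # first call traces, compiles, and installs guards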
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22], accessed_by=GetItemGuardAccessor(22)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22], 140581770775952) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[22].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn, 140581770777152) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[22].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_k, 140581770777296) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_q, 140581770777392) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_v, 140581770777440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.norm_k, 140581770777344) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[22].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[22].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.norm_k.weight, 140581772783952) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.norm_q, 140581770777200) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[22].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[22].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.norm_q.weight, 140581773260208) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[22].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[22].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.processor, 140581770777104) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.forward, accessed_by=FuncDefaultsGuardAccessor
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm, 140581770776768) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[22].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.norm, 140581770776912) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.silu, 140581770776816) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.linear, 140581770776864) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].act_mlp, 140581770777008) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_mlp, 140581770776960) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_out, 140581770777056) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_out.training, 140591004393440) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
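The subtree for block [22] ends here and block [23] begins; apart from the object ids, the guard structure is the same for every block in the list. Because these are ID_MATCH guards on concrete object ids, any mutation of the module tree between calls (replacing a submodule, swapping `attn.processor`, flipping a `training` flag) fails a guard and triggers recompilation, and the EQUALS_MATCH guards fail the same way if an attribute's value changes. A hedged sketch of watching that happen, reusing the illustrative `Toy` module from the sketch above (`recompiles` logging prints which guard failed):

    import torch

    # Same effect as TORCH_LOGS="recompiles": log the failed guard that
    # forced each recompilation.
    torch._logging.set_logs(recompiles=True)

    class Toy(torch.nn.Module):
        def __init__(self):
            super().__init__()
            self.heads = 24
            self.proj = torch.nn.Linear(8, 8)

        def forward(self, x):
            return self.proj(x) / self.heads

    compiled = torch.compile(Toy())
    x = torch.randn(2, 8)
    compiled(x)                    # compile #1; guard tree installed
    compiled._orig_mod.heads = 12  # invalidates the EQUALS_MATCH on `heads`
    compiled(x)                    # failed guard is logged, then a recompile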
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23], accessed_by=GetItemGuardAccessor(23)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23], 140581770776720) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[23].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn, 140581770777920) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[23].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_k, 140581770778064) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_q, 140581770778160) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_v, 140581770778208) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.norm_k, 140581770778112) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[23].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[23].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.norm_k.weight, 140581765132864) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.norm_q, 140581770777968) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[23].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[23].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.norm_q.weight, 140581772776352) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[23].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[23].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.processor, 140581770777872) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.forward, accessed_by=FuncDefaultsGuardAccessor
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm, 140581770777536) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[23].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.norm, 140581770777680) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.silu, 140581770777584) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.linear, 140581770777632) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager:
source=L['self'].single_transformer_blocks[23].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].act_mlp, 140581770777776) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_mlp, 140581770777728) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[23].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_out, 140581770777824) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_out.training, 140591004393440) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24], accessed_by=GetItemGuardAccessor(24) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24], 140581770777488) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[24].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] 
[0/0] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn, 140581770778688) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[24].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_k, 140581770778832) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_q, 140581770778928) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_v, 140581770778976) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.norm_k, 140581770778880) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[24].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[24].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[24].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.norm_k.weight, 140581771019104) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- 
GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.norm_q, 140581770778736) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[24].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[24].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.norm_q.weight, 140581773261248) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] 
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[24].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[24].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.processor, 140581770778640) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 
14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm, 140581770778304) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[24].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.norm, 140581770778448) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[24].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.silu, 140581770778352) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.linear, 140581770778400) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[24].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].act_mlp, 140581770778544) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_mlp, 140581770778496) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[24].proj_out, 140581770778592) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_out.training, 140591004393440) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25], accessed_by=GetItemGuardAccessor(25) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25], 140581770778256) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[25].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # 
diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn, 140581770779456) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[25].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_k, 140581770779600) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_q, 140581770779696) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_v, 140581770779744) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.norm_k, 140581770779648) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not 
___dict_contains('forward', L['self'].single_transformer_blocks[25].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[25].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.norm_k.weight, 140581773247968) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.norm_q, 140581770779504) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[25].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[25].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.norm_q.weight, 140581765132224) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[25].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[25].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.processor, 140581770779408) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.forward, accessed_by=FuncDefaultsGuardAccessor
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm, 140581770779072) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[25].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.norm, 140581770779216) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.silu, 140581770779120) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.linear, 140581770779168) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].act_mlp, 140581770779312) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_mlp, 140581770779264) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_out, 140581770779360) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_out.training, 140591004393440) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26], accessed_by=GetItemGuardAccessor(26)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26], 140581770779024) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[26].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn, 140581770780224) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[26].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_k, 140581770780368) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_q, 140581770780464) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_v, 140581770780512) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.norm_k, 140581770780416) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[26].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[26].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.norm_k.weight, 140581783341632) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.norm_q, 140581770780272) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[26].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[26].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.norm_q.weight, 140581773250448) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[26].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[26].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.processor, 140581770780176) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.forward, accessed_by=FuncDefaultsGuardAccessor
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm, 140581770779840) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[26].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.norm, 140581770779984) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.silu, 140581770779888) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.linear, 140581770779936) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].act_mlp, 140581770780080) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_mlp, 140581770780032) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_out, 140581770780128) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_out.training, 140591004393440) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27], accessed_by=GetItemGuardAccessor(27)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27], 140581770779792) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[27].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn, 140581770780992) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[27].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_k, 140581770781136) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_q, 140581770781232) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_v, 140581770781280) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.norm_k, 140581770781184) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[27].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[27].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.norm_k.weight, 140581773250208) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.norm_q, 140581770781040) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[27].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[27].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.norm_q.weight, 140581771022304) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[27].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[27].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.processor, 140581770780944) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.forward, accessed_by=FuncDefaultsGuardAccessor
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm,
140581770780608) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[27].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.norm, 140581770780752) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.silu, 140581770780656) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | 
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.linear, 140581770780704) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].act_mlp, 140581770780848) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # 
diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_mlp, 140581770780800) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_out, 140581770780896) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_out.training, 140591004393440) # hidden_states = gate * 
self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28], accessed_by=GetItemGuardAccessor(28) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28], 140581770780560) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[28].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn, 140581770781760) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- 
GuardManager: source=L['self'].single_transformer_blocks[28].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[28].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_k, 140581770781904) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_q, 140581770782000) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_v, 140581770782048) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.norm_k, 140581770781952) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[28].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[28].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[28].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.norm_k.weight, 140581765880688) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.norm_q, 140581770781808) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[28].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[28].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[28].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.norm_q.weight, 140581783345232) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[28].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.processor, 
accessed_by=DictGetItemGuardAccessor(processor) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[28].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.processor, 140581770781712) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm, 140581770781376) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[28].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.norm, 140581770781520) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.silu, 140581770781424) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.silu.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.linear, 140581770781472) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].act_mlp, 140581770781616) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_mlp, 140581770781568) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_out, 140581770781664) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_out.training, 140591004393440) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 
14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29], accessed_by=GetItemGuardAccessor(29) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29], 140581770781328) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[29].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn, 140581770782528) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[29].attn.__dict__) # forward_call = 
(self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_k, 140581770782672) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_q, 140581770782768) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # 
diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_v, 140581770782816) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.norm_k, 140581770782720) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[29].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[29].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.norm_k.weight, 140581771023984) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.norm_q, 140581770782576) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[29].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[29].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.norm_q.weight, 140581783346352) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[29].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[29].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.processor, 140581770782480) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.forward, accessed_by=FuncDefaultsGuardAccessor
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
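Reading the tree: every leaf above is a predicate the cached graph re-evaluates on each call before it can be reused. A rough Python sketch of what the four guard kinds in this dump check (illustrative stand-ins, not Dynamo's actual ___check_obj_id / ___check_type_id / ___dict_contains helpers):

    def check_obj_id(obj, expected_id):
        # ID_MATCH: the attribute must still be the very same object
        # (CPython identity) it was at trace time.
        return id(obj) == expected_id

    def check_type_id(obj, expected_type_id):
        # TYPE_MATCH: the object's class must be the same class object.
        return id(type(obj)) == expected_type_id

    def equals_match(value, expected):
        # EQUALS_MATCH: compared by value, e.g. attn.heads == 24.
        return value == expected

    def dict_not_contains(key, d):
        # DICT_CONTAINS (negated form above): no instance-level
        # 'forward' override was added to module.__dict__ after tracing.
        return key not in d

This is why replacing any submodule object (for example swapping a Linear while re-loading LoRA weights) fails an ID_MATCH and forces a recompile, even if the new module is structurally identical.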
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm, 140581770782144) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[29].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.norm, 140581770782288) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.silu, 140581770782192) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.linear, 140581770782240) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].act_mlp, 140581770782384) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_mlp, 140581770782336) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_out, 140581770782432) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_out.training, 140591004393440) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
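The norm/act_mlp/proj_mlp/proj_out guards above all cite the same few lines of the Flux single transformer block and its AdaLayerNormZeroSingle. A rough, runnable paraphrase of those quoted lines, assuming the usual 3-way shift/scale/gate split and the concat-before-projection layout (signatures and the residual placement are stand-ins; dtype handling trimmed):

    import torch
    import torch.nn.functional as F

    def ada_norm_zero_single(x, emb, norm, linear):
        emb = linear(F.silu(emb))                         # normalization.py:169
        shift_msa, scale_msa, gate = emb.chunk(3, dim=1)  # assumed 3-way split
        x = norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # :171
        return x, gate

    def single_block(x, temb, norm, linear, attn, proj_mlp, act_mlp, proj_out):
        residual = x                                      # assumed residual placement
        norm_x, gate = ada_norm_zero_single(x, temb, norm, linear)  # transformer_flux.py:88
        mlp_x = act_mlp(proj_mlp(norm_x))                 # :89
        attn_out = attn(norm_x)                           # :91
        x = torch.cat([attn_out, mlp_x], dim=2)           # assumed concat before proj_out
        return residual + gate.unsqueeze(1) * proj_out(x) # :98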
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30], accessed_by=GetItemGuardAccessor(30)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30], 140581770782096) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[30].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn, 140581770783296) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[30].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_k, 140581770783440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_q, 140581770783536) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
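blocks[30] now gets the same attn subtree: one ID_MATCH (plus a .training check) per projection, with heads value-guarded further down. What the quoted processor lines do with these modules, as a shape sketch (batch/sequence/inner sizes here are illustrative; only heads == 24 is pinned by the guards):

    import torch
    import torch.nn as nn

    batch, seq, heads = 1, 4096, 24
    inner_dim = 3072                      # assumed heads * head_dim
    to_q = nn.Linear(inner_dim, inner_dim)

    hidden_states = torch.randn(batch, seq, inner_dim)
    query = to_q(hidden_states)           # query = attn.to_q(hidden_states)   (:1716)
    head_dim = inner_dim // heads         # head_dim = inner_dim // attn.heads (:1721)
    query = query.view(batch, -1, heads, head_dim).transpose(1, 2)  # assumed (B, H, S, D) layout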
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_v, 140581770783584) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.norm_k, 140581770783488) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[30].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[30].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.norm_k.weight, 140581772499904) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.norm_q, 140581770783344) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[30].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[30].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.norm_q.weight, 140581766117024) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[30].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[30].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.processor, 140581770783248) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
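norm_q/norm_k are RMSNorm layers: EQUALS_MATCH pins eps to the traced value 1e-06 and ID_MATCH pins the weight parameter object itself. The computation the guards cite at normalization.py:428-430, trimmed to essentials (the diffusers implementation also handles dtype casting):

    import torch

    def rms_norm(hidden_states, weight=None, eps=1e-6):
        variance = hidden_states.pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + eps)  # normalization.py:428
        if weight is not None:                                       # :430
            hidden_states = hidden_states * weight
        return hidden_states

Constructing the module with a different eps would flip the EQUALS_MATCH and trigger a recompile.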
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.forward, accessed_by=FuncDefaultsGuardAccessor
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm, 140581770782912) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[30].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.norm, 140581770783056) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.silu, 140581770782960) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.linear, 140581770783008) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].act_mlp, 140581770783152) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_mlp, 140581770783104) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_out, 140581770783200) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_out.training, 140591004393440) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
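That closes blocks[30]; blocks[31] follows with an identical subtree. The repetition comes from the loop the guards keep citing (transformer_flux.py:509): Dynamo unrolls it at trace time, so every block it visited is guarded individually. A tiny repro of the pattern (run with TORCH_LOGS="guards" to get a tree like this one):

    import torch
    import torch.nn as nn

    class Stack(nn.Module):
        def __init__(self, n=4, d=16):
            super().__init__()
            self.blocks = nn.ModuleList(nn.Linear(d, d) for _ in range(n))

        def forward(self, x):
            for block in self.blocks:   # unrolled during tracing, like :509 above
                x = block(x)
            return x

    compiled = torch.compile(Stack())
    compiled(torch.randn(2, 16))        # one ID_MATCH subtree per blocks[i]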
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31], accessed_by=GetItemGuardAccessor(31)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31], 140581770782864) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[31].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn, 140581770784064) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[31].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_k, 140581770784208) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_q, 140581770784304) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_v, 140581770784352) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.norm_k, 140581770784256) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[31].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[31].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | |
+- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.norm_k.weight, 140581783347232) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.norm_q, 140581770784112) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[31].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[31].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.norm_q.weight, 140581766104784) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[31].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[31].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.processor, 140581770784016) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] 
[__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm, 140581770783680) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[31].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | 
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.norm, 140581770783824) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.silu, 140581770783728) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.linear, 140581770783776) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[31].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].act_mlp, 140581770783920) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_mlp, 140581770783872) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # 
diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_out, 140581770783968) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_out.training, 140591004393440) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32], accessed_by=GetItemGuardAccessor(32) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[32], 140581770783632) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[32].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn, 140581770784832) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[32].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[32].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_k, 140581770784976) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_q, 140581770785072) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_v, 140581770785120) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.norm_k, 140581770785024) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[32].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[32].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.norm_k.weight, 140581773255808) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.norm_q, 140581770784880) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[32].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[32].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.norm_q.weight, 140581783349712) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[32].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[32].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.processor, 140581770784784) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[32].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm, 140581770784448) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[32].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[32].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.norm, 140581770784592) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.silu, 140581770784496) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.linear, 140581770784544) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[32].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].act_mlp, 140581770784688) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_mlp, 140581770784640) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[32].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_out, 140581770784736) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_out.training, 140591004393440) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33], accessed_by=GetItemGuardAccessor(33) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33], 140581770784400) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 
14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[33].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn, 140581770785600) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[33].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_k, 140581770785744) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_q, 140581770785840) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_v, 140581770785888) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_v.training, 
140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.norm_k, 140581770785792) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[33].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[33].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.norm_k.weight, 140581766107504) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | 
| | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.norm_q, 140581770785648) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[33].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[33].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.norm_q.weight, 140581773258288) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[33].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[33].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.processor, 140581770785552) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[33].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm, 140581770785216) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[33].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[33].norm.norm, 140581770785360) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.silu, 140581770785264) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.linear, 140581770785312) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[33].norm.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].act_mlp, 140581770785456) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_mlp, 140581770785408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[33].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_out, 140581770785504) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_out.training, 140591004393440) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34], accessed_by=GetItemGuardAccessor(34) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34], 140581770785168) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 
140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[34].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn, 140581770786368) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[34].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_k, 140581770786512) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_q, 140581770786608) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_v, 140581770786656) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- 
GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.norm_k, 140581770786560) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[34].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[34].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.norm_k.weight, 140581783350592) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] 
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.norm_q, 140581770786416) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[34].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[34].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.norm_q.weight, 140581766108944) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:34:26.702000 140590996850496 
torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[34].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[34].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.processor, 140581770786320) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[34].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm, 140581770785984) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[34].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.norm, 140581770786128) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | 
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.silu, 140581770786032) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.linear, 140581770786080) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[34].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].act_mlp, 140581770786224) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_mlp, 140581770786176) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_mlp.training, 140591004393440) # mlp_hidden_states = 
self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_out, 140581770786272) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_out.training, 140591004393440) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35], accessed_by=GetItemGuardAccessor(35) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35], 140581770785936) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[35].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn, accessed_by=DictGetItemGuardAccessor(attn)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn, 140581770787136) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[35].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_k, 140581770787280) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_q, 140581770787376) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_v, 140581770787424) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.norm_k, 140581770787328) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[35].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[35].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.norm_k.weight, 140581771030704) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.norm_q, 140581770787184) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[35].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[35].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.norm_q.weight, 140581771031264) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
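norm_k above (and norm_q alongside it) adds two value-level guards to the usual identity checks: EQUALS_MATCH on eps == 1e-06 and an ID_MATCH on the optional weight parameter, both pointing at normalization.py:428-430. Below is a minimal sketch of such an RMS norm, assuming the conventional mean-of-squares variance; only the rsqrt line and the weight check are actually quoted in the log.

import torch
import torch.nn as nn

class RMSNormSketch(nn.Module):
    # Sketch of the QK-norm covered by the norm_q/norm_k guards, following
    # the quoted lines (normalization.py:428-430).
    def __init__(self, dim, eps=1e-6, elementwise_affine=True):
        super().__init__()
        self.eps = eps  # guarded by EQUALS_MATCH: eps == 1e-06
        self.weight = nn.Parameter(torch.ones(dim)) if elementwise_affine else None

    def forward(self, hidden_states):
        # assumed variance definition (mean of squares over the last dim)
        variance = hidden_states.pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + self.eps)  # normalization.py:428
        if self.weight is not None:  # normalization.py:430 - weight guarded by ID_MATCH
            hidden_states = hidden_states * self.weight
        return hidden_states

Note the asymmetry: eps is compared by value (any instance with eps == 1e-06 passes), while weight is compared by identity (it must be the very same Parameter object as at compile time).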
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[35].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[35].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.processor, 140581770787088) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.forward, accessed_by=FuncDefaultsGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm, accessed_by=DictGetItemGuardAccessor(norm)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm, 140581770786752) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[35].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.norm, 140581770786896) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
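The attn subtree just walked also guards heads == 24 (EQUALS_MATCH), the processor instance (TYPE_MATCH plus ID_MATCH), and the first default of forward() via FuncDefaultsGuardAccessor. The quoted processor lines (attention_processor.py:1713-1721) project q/k/v and reshape them into heads; a sketch follows, where attn stands for the guarded diffusers Attention module, and the final scaled_dot_product_attention call and output reshape are assumptions beyond the quoted lines. With heads == 24 and Flux's inner_dim of 3072 (itself an assumption here), head_dim = 3072 // 24 = 128.

import torch.nn.functional as F

def processor_sketch(attn, hidden_states, encoder_hidden_states=None):
    # Sketch of the processor __call__ the guards reference
    # (attention_processor.py:1713-1721); only the commented lines are quoted.
    batch_size, _, _ = (
        hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape
    )  # :1713 - the None default is pinned by the FuncDefaults guard
    query = attn.to_q(hidden_states)  # :1716
    key = attn.to_k(hidden_states)    # :1717
    value = attn.to_v(hidden_states)  # :1718
    inner_dim = key.shape[-1]
    head_dim = inner_dim // attn.heads  # :1721 - 3072 // 24 == 128 under the assumption above
    query = query.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2)
    key = key.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2)
    value = value.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2)
    if attn.norm_q is not None:  # :1727
        query = attn.norm_q(query)
    if attn.norm_k is not None:  # :1729
        key = attn.norm_k(key)
    # assumed attention + output reshape
    out = F.scaled_dot_product_attention(query, key, value)
    return out.transpose(1, 2).reshape(batch_size, -1, inner_dim)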
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.silu, 140581770786800) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.linear, 140581770786848) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].act_mlp, 140581770786992) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_mlp, 140581770786944) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
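The norm subtree for block 35 quotes normalization.py:169 and :171, i.e. the modulated norm that produced the (norm_hidden_states, gate) pair guarded at transformer_flux.py:88. Below is a sketch consistent with those two lines; the 3*dim projection width and the three-way chunk split are assumptions beyond what the log quotes.

import torch.nn as nn

class ModulatedNormSketch(nn.Module):
    # Sketch of the norm.silu / norm.linear / norm.norm trio guarded above,
    # following the quoted lines (normalization.py:169 and :171).
    def __init__(self, dim):
        super().__init__()
        self.silu = nn.SiLU()
        self.linear = nn.Linear(dim, 3 * dim)  # assumed width
        self.norm = nn.LayerNorm(dim, elementwise_affine=False, eps=1e-6)

    def forward(self, x, emb):
        emb = self.linear(self.silu(emb))  # normalization.py:169
        shift_msa, scale_msa, gate_msa = emb.chunk(3, dim=1)  # assumed split
        x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # normalization.py:171
        return x, gate_msa

The returned pair matches the tuple unpacking the guard comments quote from transformer_flux.py:88.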
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_out, 140581770787040) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_out.training, 140591004393440) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36], accessed_by=GetItemGuardAccessor(36)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36], 140581770786704) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[36].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].training, accessed_by=DictGetItemGuardAccessor(training)
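Block 36 opens with the same leaf-guard vocabulary seen throughout: ___check_obj_id is an identity check, and the DICT_CONTAINS guards assert that no instance attribute named 'forward' shadows the class method (that is what nn/modules/module.py:1556 resolves dynamically). The constant 140591004393440 recurs on every .training guard; presumably it is the id of the False singleton, consistent with a model compiled in eval() mode. A plain-Python restatement for orientation (the real guards run as compiled closures, so this is illustrative only):

def check_obj_id(obj, expected_id):
    # ID_MATCH compares identity, not equality: the attribute must still be
    # the exact object it was at compile time.
    return id(obj) == expected_id

def block_guards_hold(block):
    # Assumed restatement of two leaf guards from the subtree above.
    return (
        check_obj_id(block.training, id(False))  # block.train() breaks this
        and "forward" not in block.__dict__  # DICT_CONTAINS: no per-instance
        # override shadowing the class-level forward resolved at module.py:1556
    )

Calling block.train(), or assigning block.forward = some_function on the instance, would make block_guards_hold() return False, which is exactly the condition under which Dynamo discards this cache entry.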
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn, accessed_by=DictGetItemGuardAccessor(attn)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn, 140581770787904) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[36].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_k, 140581770788048) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_q, 140581770788144) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_v, 140581770788192) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.norm_k, 140581770788096) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[36].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[36].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.norm_k.weight, 140581766111904) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.norm_q, 140581770787952) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[36].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[36].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.norm_q.weight, 140581773261648) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[36].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[36].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.processor, 140581770787856) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.forward, accessed_by=FuncDefaultsGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm, accessed_by=DictGetItemGuardAccessor(norm)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm, 140581770787520) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[36].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.norm, 140581770787664) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
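Two of the less common accessors appear again in this attn subtree. TYPE_MATCH and ID_MATCH together pin attn.processor to the same class and the same instance, and FuncDefaultsGuardAccessor reaches into attn.forward.__defaults__[0] to pin the encoder_hidden_states=None default quoted at attention_processor.py:1713 (140591004478624 being, presumably, the id of None). Restated as ordinary Python - an approximation, and the helper name is hypothetical:

def attn_guards_hold(attn, processor_type_id, processor_id):
    # TYPE_MATCH then ID_MATCH on attn.processor: same class and same instance.
    if id(type(attn.processor)) != processor_type_id:
        return False
    if id(attn.processor) != processor_id:
        return False
    # FuncDefaultsGuardAccessor + GetItemGuardAccessor(0): the first default of
    # forward() - encoder_hidden_states at the quoted :1713 - must still be None.
    return attn.forward.__defaults__[0] is None

Because the ID_MATCH also has to hold, swapping the processor (for instance via diffusers' Attention.set_processor) invalidates this cache entry even when the replacement is an instance of the same class.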
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.silu, 140581770787568) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.linear, 140581770787616) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].act_mlp, 140581770787760) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_mlp, 140581770787712) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
___check_obj_id(L['self'].single_transformer_blocks[36].proj_out, 140581770787808) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_out.training, 140591004393440) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37], accessed_by=GetItemGuardAccessor(37) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37], 140581770787472) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[37].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # 
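Note: the subtree above closes single_transformer_blocks[36]; the identical guard shape repeats for every one of the 38 entries of self.single_transformer_blocks, ending with [37] below. Each ID_MATCH is an object-identity check, so replacing any submodule with a new instance (even an identically configured one) fails the guard and forces recompilation. A minimal sketch of that semantics, with id_match_guard as a hypothetical stand-in for Dynamo's ___check_obj_id, not the real implementation:

    import torch

    def id_match_guard(obj, recorded_id):
        # An ID_MATCH guard passes only while `obj` is the very same
        # Python object that was observed at compile time.
        return id(obj) == recorded_id

    block = torch.nn.Linear(8, 8)
    recorded_id = id(block)
    assert id_match_guard(block, recorded_id)        # same object: guard holds
    block = torch.nn.Linear(8, 8)                    # equivalent, but a new object
    assert not id_match_guard(block, recorded_id)    # guard fails -> recompile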
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37], accessed_by=GetItemGuardAccessor(37)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37], 140581770787472) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[37].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn, 140581770788672) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[37].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_k, 140581765087296) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_k.training, 140591004393440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_q, 140581765087344) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_q.training, 140591004393440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_v, 140581765087392) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_v.training, 140591004393440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.norm_k, 140581770788816) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[37].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[37].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.norm_k.weight, 140581766113424) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.norm_q, 140581770788720) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[37].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[37].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.norm_q.weight, 140581766113904) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[37].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[37].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.processor, 140581770788624) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.forward, accessed_by=FuncDefaultsGuardAccessor
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm, 140581770788288) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[37].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.norm, 140581770788432) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.silu, 140581770788336) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.linear, 140581770788384) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.linear.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].act_mlp, 140581770788528) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_mlp, 140581770788480) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_out, 140581770788576) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_out.training, 140591004393440) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
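Note: block [37] closes the module guards for L['self']. Alongside the identity checks, the tree specializes on plain Python attribute values read during tracing, e.g. EQUALS_MATCH on attn.norm_k.eps == 1e-06 and attn.heads == 24 above. A hedged sketch of the consequence on a toy module (not the diffusers code): mutating such an attribute after compilation fails the guard and the next call retraces.

    import torch

    class RMSScale(torch.nn.Module):
        def __init__(self):
            super().__init__()
            self.eps = 1e-6        # plain float attribute, guarded by value

        def forward(self, x):
            return x * torch.rsqrt(x.pow(2).mean(-1, keepdim=True) + self.eps)

    base = RMSScale()
    compiled = torch.compile(base)
    x = torch.randn(4, 16)
    compiled(x)         # first call compiles; guard records eps == 1e-06
    base.eps = 1e-5     # value change invalidates the EQUALS_MATCH guard
    compiled(x)         # guard check fails -> Dynamo retraces with the new value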
| +- GuardManager: source=L['img_ids'], accessed_by=DictGetItemGuardAccessor(img_ids)
| | +- TENSOR_MATCH: check_tensor(L['img_ids'], Tensor, DispatchKeySet(CUDA, BackendSelect, ADInplaceOrView, AutogradCUDA), torch.bfloat16, device=0, requires_grad=False, size=[4096, 3], stride=[3, 1]) # if img_ids.ndim == 3: # diffusers/src/diffusers/models/transformers/transformer_flux.py:462 in forward
| | +- NO_HASATTR: hasattr(L['img_ids'], '_dynamo_dynamic_indices') == False # if img_ids.ndim == 3: # diffusers/src/diffusers/models/transformers/transformer_flux.py:462 in forward
| | +- NO_TENSOR_ALIASING: check_no_aliasing(L['img_ids'], L['txt_ids'], L['guidance'], L['timestep'], L['hidden_states'], L['pooled_projections'], L['encoder_hidden_states'])
| +- GuardManager: source=L['txt_ids'], accessed_by=DictGetItemGuardAccessor(txt_ids)
| | +- TENSOR_MATCH: check_tensor(L['txt_ids'], Tensor, DispatchKeySet(CUDA, BackendSelect, ADInplaceOrView, AutogradCUDA), torch.bfloat16, device=0, requires_grad=False, size=[512, 3], stride=[3, 1]) # if txt_ids.ndim == 3: # diffusers/src/diffusers/models/transformers/transformer_flux.py:456 in forward
| | +- NO_HASATTR: hasattr(L['txt_ids'], '_dynamo_dynamic_indices') == False # if txt_ids.ndim == 3: # diffusers/src/diffusers/models/transformers/transformer_flux.py:456 in forward
| | +- NO_TENSOR_ALIASING: check_no_aliasing(L['img_ids'], L['txt_ids'], L['guidance'], L['timestep'], L['hidden_states'], L['pooled_projections'], L['encoder_hidden_states'])
| +- GuardManager: source=L['guidance'], accessed_by=DictGetItemGuardAccessor(guidance)
| | +- TENSOR_MATCH: check_tensor(L['guidance'], Tensor, DispatchKeySet(CUDA, BackendSelect, ADInplaceOrView, AutogradCUDA), torch.float32, device=0, requires_grad=False, size=[1], stride=[1]) # if guidance is not None: # diffusers/src/diffusers/models/transformers/transformer_flux.py:445 in forward
| | +- NO_HASATTR: hasattr(L['guidance'], '_dynamo_dynamic_indices') == False # if guidance is not None: # diffusers/src/diffusers/models/transformers/transformer_flux.py:445 in forward
| | +- NO_TENSOR_ALIASING: check_no_aliasing(L['img_ids'], L['txt_ids'], L['guidance'], L['timestep'], L['hidden_states'], L['pooled_projections'], L['encoder_hidden_states'])
| +- GuardManager: source=L['timestep'], accessed_by=DictGetItemGuardAccessor(timestep)
| | +- TENSOR_MATCH: check_tensor(L['timestep'], Tensor, DispatchKeySet(CUDA, BackendSelect, ADInplaceOrView, AutogradCUDA), torch.bfloat16, device=0, requires_grad=False, size=[1], stride=[1]) # timestep = timestep.to(hidden_states.dtype) * 1000 # diffusers/src/diffusers/models/transformers/transformer_flux.py:444 in forward
| | +- NO_HASATTR: hasattr(L['timestep'], '_dynamo_dynamic_indices') == False # timestep = timestep.to(hidden_states.dtype) * 1000 # diffusers/src/diffusers/models/transformers/transformer_flux.py:444 in forward
| | +- NO_TENSOR_ALIASING: check_no_aliasing(L['img_ids'], L['txt_ids'], L['guidance'], L['timestep'], L['hidden_states'], L['pooled_projections'], L['encoder_hidden_states'])
| +- GuardManager: source=L['return_dict'], accessed_by=DictGetItemGuardAccessor(return_dict)
| | +- ID_MATCH: ___check_obj_id(L['return_dict'], 140591004393440) # if not return_dict: # diffusers/src/diffusers/models/transformers/transformer_flux.py:555 in forward
| +- GuardManager: source=L['hidden_states'], accessed_by=DictGetItemGuardAccessor(hidden_states)
| | +- TENSOR_MATCH: check_tensor(L['hidden_states'], Tensor, DispatchKeySet(CUDA, BackendSelect, ADInplaceOrView, AutogradCUDA), torch.bfloat16, device=0, requires_grad=False, size=[1, 4096, 64], stride=[262144, 64, 1]) # hidden_states = self.x_embedder(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:442 in forward
| | +- NO_HASATTR: hasattr(L['hidden_states'], '_dynamo_dynamic_indices') == False # hidden_states = self.x_embedder(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:442 in forward
| | +- NO_TENSOR_ALIASING: check_no_aliasing(L['img_ids'], L['txt_ids'], L['guidance'], L['timestep'], L['hidden_states'], L['pooled_projections'], L['encoder_hidden_states'])
| +- GuardManager: source=L['pooled_projections'], accessed_by=DictGetItemGuardAccessor(pooled_projections)
| | +- TENSOR_MATCH: check_tensor(L['pooled_projections'], Tensor, DispatchKeySet(CUDA, BackendSelect, ADInplaceOrView, AutogradCUDA), torch.bfloat16, device=0, requires_grad=False, size=[1, 768], stride=[768, 1]) # timesteps_emb = self.timestep_embedder(timesteps_proj.to(dtype=pooled_projection.dtype)) # (N, D) # diffusers/src/diffusers/models/embeddings.py:1060 in forward
| | +- NO_HASATTR: hasattr(L['pooled_projections'], '_dynamo_dynamic_indices') == False # timesteps_emb = self.timestep_embedder(timesteps_proj.to(dtype=pooled_projection.dtype)) # (N, D) # diffusers/src/diffusers/models/embeddings.py:1060 in forward
| | +- NO_TENSOR_ALIASING: check_no_aliasing(L['img_ids'], L['txt_ids'], L['guidance'], L['timestep'], L['hidden_states'], L['pooled_projections'], L['encoder_hidden_states'])
| +- GuardManager: source=L['encoder_hidden_states'], accessed_by=DictGetItemGuardAccessor(encoder_hidden_states)
| | +- TENSOR_MATCH: check_tensor(L['encoder_hidden_states'], Tensor, DispatchKeySet(CUDA, BackendSelect, ADInplaceOrView, AutogradCUDA), torch.bfloat16, device=0, requires_grad=False, size=[1, 512, 4096], stride=[2097152, 4096, 1]) # encoder_hidden_states = self.context_embedder(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:454 in forward
| | +- NO_HASATTR: hasattr(L['encoder_hidden_states'], '_dynamo_dynamic_indices') == False # encoder_hidden_states = self.context_embedder(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:454 in forward
| | +- NO_TENSOR_ALIASING: check_no_aliasing(L['img_ids'], L['txt_ids'], L['guidance'], L['timestep'], L['hidden_states'], L['pooled_projections'], L['encoder_hidden_states'])
| +- GuardManager: source=L['joint_attention_kwargs'], accessed_by=DictGetItemGuardAccessor(joint_attention_kwargs)
| | +- ID_MATCH: ___check_obj_id(L['joint_attention_kwargs'], 140591004478624) # if joint_attention_kwargs is not None: # diffusers/src/diffusers/models/transformers/transformer_flux.py:428 in forward
| +- GuardManager: source=L['controlnet_block_samples'], accessed_by=DictGetItemGuardAccessor(controlnet_block_samples)
| | +- ID_MATCH: ___check_obj_id(L['controlnet_block_samples'], 140591004478624) # if controlnet_block_samples is not None: # diffusers/src/diffusers/models/transformers/transformer_flux.py:502 in forward
| +- GuardManager: source=L['controlnet_single_block_samples'], accessed_by=DictGetItemGuardAccessor(controlnet_single_block_samples)
| | +- ID_MATCH: ___check_obj_id(L['controlnet_single_block_samples'], 140591004478624) # if controlnet_single_block_samples is not None: # diffusers/src/diffusers/models/transformers/transformer_flux.py:538 in forward
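Note: the local-variable guards end here. Every tensor argument is pinned by a TENSOR_MATCH on dtype, device, requires_grad, size, and stride (hidden_states is fixed at size=[1, 4096, 64] and encoder_hidden_states at [1, 512, 4096] above), and NO_TENSOR_ALIASING requires that the seven inputs not share storage. With shapes specialized like this, a different resolution or prompt length fails the guard and triggers a recompile; a hedged sketch of relaxing one dimension with torch._dynamo.mark_dynamic (a real API, applied to a toy function rather than the Flux transformer):

    import torch

    def f(x):
        return x * 2.0

    compiled = torch.compile(f)
    compiled(torch.randn(1, 4096, 64))  # guards record size=[1, 4096, 64]

    x = torch.randn(1, 1024, 64)
    torch._dynamo.mark_dynamic(x, 1)    # ask for a symbolic sequence dim
    compiled(x)                         # recompiles once with a dynamic dim 1
                                        # instead of pinning the value 1024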
___check_obj_id(G['scale_lora_layers'].__code__, 140585209572752) # scale_lora_layers(self, lora_scale) # diffusers/src/diffusers/models/transformers/transformer_flux.py:436 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | +- GuardManager: source=G['unscale_lora_layers'], accessed_by=DictGetItemGuardAccessor(unscale_lora_layers) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | +- GuardManager: source=G['unscale_lora_layers'].__code__, accessed_by=GetAttrGuardAccessor(__code__) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['unscale_lora_layers'].__code__, 140585209572928) # unscale_lora_layers(self, lora_scale) # diffusers/src/diffusers/models/transformers/transformer_flux.py:553 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | +- GuardManager: source=G['__builtins_dict___0'], accessed_by=DictGetItemGuardAccessor(__builtins_dict___0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | +- GuardManager: source=G['__builtins_dict___0']['int'], accessed_by=DictGetItemGuardAccessor(int) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__builtins_dict___0']['int'], 140591004461248) # if isinstance(pos, int): # diffusers/src/diffusers/models/embeddings.py:605 in get_1d_rotary_pos_embed V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | +- GuardManager: source=G['__builtins_dict___0']['len'], accessed_by=DictGetItemGuardAccessor(len) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__builtins_dict___0']['len'], 140590981894672) # assert len(timesteps.shape) == 1, "Timesteps should be a 1d-array" # diffusers/src/diffusers/models/embeddings.py:54 in get_timestep_embedding V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | +- GuardManager: source=G['__builtins_dict___0']['set'], accessed_by=DictGetItemGuardAccessor(set) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__builtins_dict___0']['set'], 140591004484896) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | +- GuardManager: source=G['__builtins_dict___0']['range'], accessed_by=DictGetItemGuardAccessor(range) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__builtins_dict___0']['range'], 140591004481376) # for i in range(n_axes): # diffusers/src/diffusers/models/embeddings.py:696 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | +- GuardManager: source=G['__builtins_dict___0']['enumerate'], accessed_by=DictGetItemGuardAccessor(enumerate) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__builtins_dict___0']['enumerate'], 140591004413056) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 
14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | +- GuardManager: source=G['__builtins_dict___0']['isinstance'], accessed_by=DictGetItemGuardAccessor(isinstance) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__builtins_dict___0']['isinstance'], 140590981894352) # if isinstance(pos, int): # diffusers/src/diffusers/models/embeddings.py:605 in get_1d_rotary_pos_embed V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_attention'], accessed_by=DictGetItemGuardAccessor(__import_diffusers_dot_models_dot_attention) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_attention'], 140585077988688) # if len(args) > 0 or kwargs.get("scale", None) is not None: # diffusers/src/diffusers/models/attention.py:1197 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'], accessed_by=DictGetItemGuardAccessor(__import_diffusers_dot_models_dot_embeddings) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'], 140585079518960) # t_emb = get_timestep_embedding( # diffusers/src/diffusers/models/embeddings.py:764 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].np, accessed_by=GetAttrGuardAccessor(np) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].np, 140590976137424) # if isinstance(pos, np.ndarray): # diffusers/src/diffusers/models/embeddings.py:607 in get_1d_rotary_pos_embed V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].np.ndarray, accessed_by=GetAttrGuardAccessor(ndarray) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].np.ndarray, 140588028923008) # if isinstance(pos, np.ndarray): # diffusers/src/diffusers/models/embeddings.py:607 in get_1d_rotary_pos_embed V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].math, accessed_by=GetAttrGuardAccessor(math) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].math, 140590979232480) # exponent = -math.log(max_period) * torch.arange( # diffusers/src/diffusers/models/embeddings.py:57 in get_timestep_embedding V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].math.log, accessed_by=GetAttrGuardAccessor(log) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].math.log, 
140590979235360) # exponent = -math.log(max_period) * torch.arange( # diffusers/src/diffusers/models/embeddings.py:57 in get_timestep_embedding V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].torch, accessed_by=GetAttrGuardAccessor(torch) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].torch, 140590979095808) # exponent = -math.log(max_period) * torch.arange( # diffusers/src/diffusers/models/embeddings.py:57 in get_timestep_embedding V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].torch.cat, accessed_by=GetAttrGuardAccessor(cat) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].torch.cat, 140590976095136) # emb = torch.cat([torch.sin(emb), torch.cos(emb)], dim=-1) # diffusers/src/diffusers/models/embeddings.py:69 in get_timestep_embedding V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].torch.cos, accessed_by=GetAttrGuardAccessor(cos) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].torch.cos, 140590976096336) # emb = torch.cat([torch.sin(emb), torch.cos(emb)], dim=-1) # diffusers/src/diffusers/models/embeddings.py:69 in get_timestep_embedding V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].torch.exp, accessed_by=GetAttrGuardAccessor(exp) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].torch.exp, 140590976097696) # emb = torch.exp(exponent) # diffusers/src/diffusers/models/embeddings.py:62 in get_timestep_embedding V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].torch.sin, accessed_by=GetAttrGuardAccessor(sin) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].torch.sin, 140590976106096) # emb = torch.cat([torch.sin(emb), torch.cos(emb)], dim=-1) # diffusers/src/diffusers/models/embeddings.py:69 in get_timestep_embedding V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].torch.outer, accessed_by=GetAttrGuardAccessor(outer) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].torch.outer, 140590976134544) # freqs = torch.outer(pos, freqs) # type: ignore # [S, D/2] # diffusers/src/diffusers/models/embeddings.py:616 in get_1d_rotary_pos_embed V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- GuardManager: 
source=G['__import_diffusers_dot_models_dot_embeddings'].torch.stack, accessed_by=GetAttrGuardAccessor(stack) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].torch.stack, 140590976059488) # x_rotated = torch.stack([-x_imag, x_real], dim=-1).flatten(3) # diffusers/src/diffusers/models/embeddings.py:662 in apply_rotary_emb V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].torch.arange, accessed_by=GetAttrGuardAccessor(arange) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].torch.arange, 140590975983808) # exponent = -math.log(max_period) * torch.arange( # diffusers/src/diffusers/models/embeddings.py:57 in get_timestep_embedding V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].torch.float32, accessed_by=GetAttrGuardAccessor(float32) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- EQUALS_MATCH: G['__import_diffusers_dot_models_dot_embeddings'].torch.float32 == torch.float32 # start=0, end=half_dim, dtype=torch.float32, device=timesteps.device # diffusers/src/diffusers/models/embeddings.py:58 in get_timestep_embedding V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].torch.float64, accessed_by=GetAttrGuardAccessor(float64) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- EQUALS_MATCH: G['__import_diffusers_dot_models_dot_embeddings'].torch.float64 == torch.float64 # freqs_dtype = torch.float32 if is_mps else torch.float64 # diffusers/src/diffusers/models/embeddings.py:695 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].apply_rotary_emb, accessed_by=GetAttrGuardAccessor(apply_rotary_emb) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].apply_rotary_emb.__code__, accessed_by=GetAttrGuardAccessor(__code__) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].apply_rotary_emb.__code__, 140585079325072) # from .embeddings import apply_rotary_emb # diffusers/src/diffusers/models/attention_processor.py:1760 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].apply_rotary_emb, accessed_by=FuncDefaultsGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].apply_rotary_emb.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- ID_MATCH: 
___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].apply_rotary_emb.__defaults__[0], 140591004393408) # if use_real: # diffusers/src/diffusers/models/embeddings.py:653 in apply_rotary_emb V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].apply_rotary_emb.__defaults__[1], accessed_by=GetItemGuardAccessor(1) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- EQUALS_MATCH: G['__import_diffusers_dot_models_dot_embeddings'].apply_rotary_emb.__defaults__[1] == -1 # if use_real_unbind_dim == -1: # diffusers/src/diffusers/models/embeddings.py:659 in apply_rotary_emb V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].get_timestep_embedding, accessed_by=GetAttrGuardAccessor(get_timestep_embedding) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].get_timestep_embedding.__code__, accessed_by=GetAttrGuardAccessor(__code__) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].get_timestep_embedding.__code__, 140585079245968) # t_emb = get_timestep_embedding( # diffusers/src/diffusers/models/embeddings.py:764 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].get_timestep_embedding, accessed_by=FuncDefaultsGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].get_timestep_embedding.__defaults__[3], accessed_by=GetItemGuardAccessor(3) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- EQUALS_MATCH: G['__import_diffusers_dot_models_dot_embeddings'].get_timestep_embedding.__defaults__[3] == 10000 # exponent = -math.log(max_period) * torch.arange( # diffusers/src/diffusers/models/embeddings.py:57 in get_timestep_embedding V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].get_1d_rotary_pos_embed, accessed_by=GetAttrGuardAccessor(get_1d_rotary_pos_embed) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].get_1d_rotary_pos_embed.__code__, accessed_by=GetAttrGuardAccessor(__code__) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].get_1d_rotary_pos_embed.__code__, 140585079258816) # cos, sin = get_1d_rotary_pos_embed( # diffusers/src/diffusers/models/embeddings.py:697 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].get_1d_rotary_pos_embed, accessed_by=FuncDefaultsGuardAccessor V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: 
source=G['__import_diffusers_dot_models_dot_embeddings'].get_1d_rotary_pos_embed.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- EQUALS_MATCH: G['__import_diffusers_dot_models_dot_embeddings'].get_1d_rotary_pos_embed.__defaults__[0] == 10000.0 # theta = theta * ntk_factor # diffusers/src/diffusers/models/embeddings.py:610 in get_1d_rotary_pos_embed V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].get_1d_rotary_pos_embed.__defaults__[2], accessed_by=GetItemGuardAccessor(2) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- EQUALS_MATCH: G['__import_diffusers_dot_models_dot_embeddings'].get_1d_rotary_pos_embed.__defaults__[2] == 1.0 # 1.0 # diffusers/src/diffusers/models/embeddings.py:612 in get_1d_rotary_pos_embed V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].get_1d_rotary_pos_embed.__defaults__[3], accessed_by=GetItemGuardAccessor(3) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- EQUALS_MATCH: G['__import_diffusers_dot_models_dot_embeddings'].get_1d_rotary_pos_embed.__defaults__[3] == 1.0 # theta = theta * ntk_factor # diffusers/src/diffusers/models/embeddings.py:610 in get_1d_rotary_pos_embed V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | +- GuardManager: source=G['__import_torch_dot_nn_dot_modules_dot_module'], accessed_by=DictGetItemGuardAccessor(__import_torch_dot_nn_dot_modules_dot_module) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | +- ID_MATCH: ___check_obj_id(G['__import_torch_dot_nn_dot_modules_dot_module'], 140585322849888) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | +- GuardManager: source=G['__import_torch_dot_nn_dot_modules_dot_module'].torch, accessed_by=GetAttrGuardAccessor(torch) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__import_torch_dot_nn_dot_modules_dot_module'].torch, 140590979095808) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- GuardManager: source=G['__import_torch_dot_nn_dot_modules_dot_module'].torch._C, accessed_by=GetAttrGuardAccessor(_C) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_torch_dot_nn_dot_modules_dot_module'].torch._C, 140590975498928) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=G['__import_torch_dot_nn_dot_modules_dot_module'].torch._C._get_tracing_state, accessed_by=GetAttrGuardAccessor(_get_tracing_state) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | 
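
The EQUALS_MATCH guards above on get_timestep_embedding.__defaults__[3] and get_1d_rotary_pos_embed.__defaults__ show that Dynamo guards the default-argument values of every function it inlines (via FuncDefaultsGuardAccessor), not only tensor properties. A minimal sketch of that mechanism, using a toy helper in place of the diffusers functions (the names below are illustrative, not taken from this log):

    import torch

    def scaled(x, factor=2.0):
        # Dynamo inlines this call and guards scaled.__defaults__[0] == 2.0
        return x * factor

    @torch.compile
    def f(x):
        return scaled(x)

    x = torch.randn(4)
    f(x)                          # first compilation installs the defaults guard
    scaled.__defaults__ = (3.0,)  # mutate the default in place...
    f(x)                          # ...EQUALS_MATCH fails and f recompiles

With TORCH_LOGS="recompiles" the second call should print a guard-failure message of the same shape as the one later in this log.
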
| | | +- ID_MATCH: ___check_obj_id(G['__import_torch_dot_nn_dot_modules_dot_module'].torch._C._get_tracing_state, 140585327896000) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | +- GuardManager: source=G['__import_torch_dot_nn_dot_modules_dot_module']._global_forward_hooks, accessed_by=GetAttrGuardAccessor(_global_forward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- TYPE_MATCH: ___check_type_id(G['__import_torch_dot_nn_dot_modules_dot_module']._global_forward_hooks, 140591004471168) # or _global_forward_hooks or _global_forward_pre_hooks): # nn/modules/module.py:1561 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- DICT_LENGTH: not G['__import_torch_dot_nn_dot_modules_dot_module']._global_forward_hooks # or _global_forward_hooks or _global_forward_pre_hooks): # nn/modules/module.py:1561 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | +- GuardManager: source=G['__import_torch_dot_nn_dot_modules_dot_module']._global_backward_hooks, accessed_by=GetAttrGuardAccessor(_global_backward_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- TYPE_MATCH: ___check_type_id(G['__import_torch_dot_nn_dot_modules_dot_module']._global_backward_hooks, 140591004471168) # or _global_backward_pre_hooks or _global_backward_hooks # nn/modules/module.py:1560 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- DICT_LENGTH: not G['__import_torch_dot_nn_dot_modules_dot_module']._global_backward_hooks # or _global_backward_pre_hooks or _global_backward_hooks # nn/modules/module.py:1560 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | +- GuardManager: source=G['__import_torch_dot_nn_dot_modules_dot_module']._global_forward_pre_hooks, accessed_by=GetAttrGuardAccessor(_global_forward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- TYPE_MATCH: ___check_type_id(G['__import_torch_dot_nn_dot_modules_dot_module']._global_forward_pre_hooks, 140591004471168) # or _global_forward_hooks or _global_forward_pre_hooks): # nn/modules/module.py:1561 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- DICT_LENGTH: not G['__import_torch_dot_nn_dot_modules_dot_module']._global_forward_pre_hooks # or _global_forward_hooks or _global_forward_pre_hooks): # nn/modules/module.py:1561 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | +- GuardManager: source=G['__import_torch_dot_nn_dot_modules_dot_module']._global_backward_pre_hooks, accessed_by=GetAttrGuardAccessor(_global_backward_pre_hooks) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- TYPE_MATCH: ___check_type_id(G['__import_torch_dot_nn_dot_modules_dot_module']._global_backward_pre_hooks, 140591004471168) # or _global_backward_pre_hooks or _global_backward_hooks # nn/modules/module.py:1560 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- DICT_LENGTH: not 
G['__import_torch_dot_nn_dot_modules_dot_module']._global_backward_pre_hooks # or _global_backward_pre_hooks or _global_backward_hooks # nn/modules/module.py:1560 in _call_impl V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_activations'], accessed_by=DictGetItemGuardAccessor(__import_diffusers_dot_models_dot_activations) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_activations'], 140585079141968) # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_activations'].F, accessed_by=GetAttrGuardAccessor(F) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_activations'].F, 140585319847216) # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_activations'].F.gelu, accessed_by=GetAttrGuardAccessor(gelu) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_activations'].F.gelu, 140585328409424) # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_normalization'], accessed_by=DictGetItemGuardAccessor(__import_diffusers_dot_models_dot_normalization) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_normalization'], 140585079754240) # variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True) # diffusers/src/diffusers/models/normalization.py:427 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_normalization'].torch, accessed_by=GetAttrGuardAccessor(torch) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_normalization'].torch, 140590979095808) # variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True) # diffusers/src/diffusers/models/normalization.py:427 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_normalization'].torch.chunk, accessed_by=GetAttrGuardAccessor(chunk) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_normalization'].torch.chunk, 140590976095296) # scale, shift = torch.chunk(emb, 2, dim=1) # diffusers/src/diffusers/models/normalization.py:305 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- GuardManager: 
source=G['__import_diffusers_dot_models_dot_normalization'].torch.rsqrt, accessed_by=GetAttrGuardAccessor(rsqrt) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_normalization'].torch.rsqrt, 140590976058128) # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_normalization'].torch.float16, accessed_by=GetAttrGuardAccessor(float16) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- EQUALS_MATCH: G['__import_diffusers_dot_models_dot_normalization'].torch.float16 == torch.float16 # if self.weight.dtype in [torch.float16, torch.bfloat16]: # diffusers/src/diffusers/models/normalization.py:432 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_normalization'].torch.float32, accessed_by=GetAttrGuardAccessor(float32) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- EQUALS_MATCH: G['__import_diffusers_dot_models_dot_normalization'].torch.float32 == torch.float32 # variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True) # diffusers/src/diffusers/models/normalization.py:427 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_normalization'].torch.bfloat16, accessed_by=GetAttrGuardAccessor(bfloat16) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- EQUALS_MATCH: G['__import_diffusers_dot_models_dot_normalization'].torch.bfloat16 == torch.bfloat16 # if self.weight.dtype in [torch.float16, torch.bfloat16]: # diffusers/src/diffusers/models/normalization.py:432 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_attention_processor'], accessed_by=DictGetItemGuardAccessor(__import_diffusers_dot_models_dot_attention_processor) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_attention_processor'], 140585079143248) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_attention_processor'].F, accessed_by=GetAttrGuardAccessor(F) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_attention_processor'].F, 140585319847216) # hidden_states = F.scaled_dot_product_attention(query, key, value, dropout_p=0.0, is_causal=False) # diffusers/src/diffusers/models/attention_processor.py:1765 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_attention_processor'].F.scaled_dot_product_attention, 
accessed_by=GetAttrGuardAccessor(scaled_dot_product_attention) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_attention_processor'].F.scaled_dot_product_attention, 140585328298960) # hidden_states = F.scaled_dot_product_attention(query, key, value, dropout_p=0.0, is_causal=False) # diffusers/src/diffusers/models/attention_processor.py:1765 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_attention_processor'].torch, accessed_by=GetAttrGuardAccessor(torch) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_attention_processor'].torch, 140590979095808) # query = torch.cat([encoder_hidden_states_query_proj, query], dim=2) # diffusers/src/diffusers/models/attention_processor.py:1755 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_attention_processor'].torch.cat, accessed_by=GetAttrGuardAccessor(cat) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_attention_processor'].torch.cat, 140590976095136) # query = torch.cat([encoder_hidden_states_query_proj, query], dim=2) # diffusers/src/diffusers/models/attention_processor.py:1755 in __call__ V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_attention_processor'].inspect, accessed_by=GetAttrGuardAccessor(inspect) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_attention_processor'].inspect, 140590979824624) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_attention_processor'].inspect.signature, accessed_by=GetAttrGuardAccessor(signature) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_attention_processor'].inspect.signature.__code__, accessed_by=GetAttrGuardAccessor(__code__) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_attention_processor'].inspect.signature.__code__, 140590977567008) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | +- GuardManager: source=G['__import_peft_dot_tuners_dot_tuners_utils'], accessed_by=DictGetItemGuardAccessor(__import_peft_dot_tuners_dot_tuners_utils) V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | +- GuardManager: source=G['__import_peft_dot_tuners_dot_tuners_utils'].BaseTunerLayer, accessed_by=GetAttrGuardAccessor(BaseTunerLayer) V0909 
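
For context, dumps like the TREE_GUARD_MANAGER blocks in this log are only emitted when guard and recompile logging are switched on; the capture here is consistent with something like the following (the exact invocation is an assumption, it is not shown in the log itself):

    import torch
    # enable the [__guards] and [__recompiles] streams seen in this dump
    torch._logging.set_logs(guards=True, recompiles=True)
    # equivalently, from the shell (script name is hypothetical):
    #   TORCH_LOGS="guards,recompiles" python run_flux_compile.py
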
14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__import_peft_dot_tuners_dot_tuners_utils'].BaseTunerLayer, 93831560473968) # from peft.tuners.tuners_utils import BaseTunerLayer # diffusers/src/diffusers/utils/peft_utils.py:113 in scale_lora_layers
V0909 14:34:26.702000 140590996850496 torch/_dynamo/guards.py:2148] [0/0] [__guards]
V0909 14:34:55.357000 140590996850496 torch/_dynamo/guards.py:2611] [0/1] [__recompiles] Recompiling function forward in /home/sayak/diffusers/src/diffusers/models/transformers/transformer_flux.py:388
V0909 14:34:55.357000 140590996850496 torch/_dynamo/guards.py:2611] [0/1] [__recompiles] triggered by the following guard failure(s):
V0909 14:34:55.357000 140590996850496 torch/_dynamo/guards.py:2611] [0/1] [__recompiles] - ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj, 140581773425056) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
W0909 14:37:21.215000 140590996850496 torch/fx/experimental/symbolic_shapes.py:4449] [0/1] xindex is not in var_ranges, defaulting to unknown range.
V0909 14:37:54.781000 140590996850496 torch/_dynamo/guards.py:2169] [0/1] [__guards] GUARDS:
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards]
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] TREE_GUARD_MANAGER:
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] +- RootGuardManager
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | +- DEFAULT_DEVICE: utils_device.CURRENT_DEVICE == None # _dynamo/output_graph.py:460 in init_ambient_guards
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | +- GLOBAL_STATE: ___check_global_state()
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | +- GuardManager: source=L['self'], accessed_by=DictGetItemGuardAccessor(self)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | +- ID_MATCH: ___check_obj_id(L['self'], 140581773415408) # scale_lora_layers(self, lora_scale) # diffusers/src/diffusers/models/transformers/transformer_flux.py:436 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | +- GuardManager: source=L['self'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | +- GuardManager: source=L['self'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- ID_MATCH: ___check_obj_id(L['self'].training, 140591004393440) # scale_lora_layers(self, lora_scale) # diffusers/src/diffusers/models/transformers/transformer_flux.py:436 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | +- GuardManager: source=L['self']._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- GuardManager: source=L['self'].norm_out, accessed_by=DictGetItemGuardAccessor(norm_out)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(L['self'].norm_out, 140581770788240) # hidden_states = self.norm_out(hidden_states, temb) #
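
The [__recompiles] block above is the pivotal event in this log: the [0/0] graph was pinned by an ID_MATCH to the exact object identity of transformer_blocks[0].ff.net[0].proj, and that identity changed between the two runs, which is consistent with a LoRA load/fuse/unfuse step swapping in a new module for proj (the log itself only shows that the id no longer matches). Replacing any submodule of an already-compiled model fails such a guard and forces the full [0/1] recompile that follows. A minimal sketch of the same failure mode on a toy module (illustrative names, not the diffusers classes):

    import torch
    import torch.nn as nn

    class Block(nn.Module):
        def __init__(self):
            super().__init__()
            self.proj = nn.Linear(8, 8)

        def forward(self, x):
            return self.proj(x)

    block = Block()
    compiled = torch.compile(block)
    x = torch.randn(2, 8)
    compiled(x)                   # compile 0/0: guards ___check_obj_id on block.proj
    block.proj = nn.Linear(8, 8)  # replace the submodule object, as a LoRA swap does
    compiled(x)                   # ID_MATCH fails -> recompile 0/1, as in this log

(Depending on the PyTorch version, nn.Module attributes may be guarded through different accessors, but identity-based guards on submodules are what this particular dump shows.)
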
diffusers/src/diffusers/models/transformers/transformer_flux.py:548 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- GuardManager: source=L['self'].norm_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].norm_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].norm_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].norm_out.training, 140591004393440) # hidden_states = self.norm_out(hidden_states, temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:548 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].norm_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].norm_out.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].norm_out.norm, 140581765087584) # x = self.norm(x) * (1 + scale)[:, None, :] + shift[:, None, :] # diffusers/src/diffusers/models/normalization.py:306 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].norm_out.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].norm_out.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].norm_out.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale)[:, None, :] + shift[:, None, :] # diffusers/src/diffusers/models/normalization.py:306 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].norm_out.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].norm_out.silu, 140581765087488) # emb = self.linear(self.silu(conditioning_embedding).to(x.dtype)) # diffusers/src/diffusers/models/normalization.py:304 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].norm_out.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].norm_out.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].norm_out.silu.training, 140591004393440) # emb = self.linear(self.silu(conditioning_embedding).to(x.dtype)) # diffusers/src/diffusers/models/normalization.py:304 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].norm_out.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].norm_out.linear, 140581765087536) # emb = self.linear(self.silu(conditioning_embedding).to(x.dtype)) # diffusers/src/diffusers/models/normalization.py:304 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].norm_out.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].norm_out.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].norm_out.linear.training, 140591004393440) # emb = self.linear(self.silu(conditioning_embedding).to(x.dtype)) # diffusers/src/diffusers/models/normalization.py:304 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].norm_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].norm_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].norm_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].norm_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- GuardManager: source=L['self'].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(L['self'].proj_out, 140581765087440) # output = self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:549 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- GuardManager: source=L['self'].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].proj_out.training, 140591004393440) # output = self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:549 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- GuardManager: 
source=L['self'].pos_embed, accessed_by=DictGetItemGuardAccessor(pos_embed) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(L['self'].pos_embed, 140581773415024) # image_rotary_emb = self.pos_embed(ids) # diffusers/src/diffusers/models/transformers/transformer_flux.py:469 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- GuardManager: source=L['self'].pos_embed.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].pos_embed.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].pos_embed.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].pos_embed.training, 140591004393440) # image_rotary_emb = self.pos_embed(ids) # diffusers/src/diffusers/models/transformers/transformer_flux.py:469 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].pos_embed.axes_dim, accessed_by=DictGetItemGuardAccessor(axes_dim) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].pos_embed.axes_dim, 140591004488512) # self.axes_dim[i], pos[:, i], repeat_interleave_real=True, use_real=True, freqs_dtype=freqs_dtype # diffusers/src/diffusers/models/embeddings.py:698 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- LENGTH_CHECK: len(L['self'].pos_embed.axes_dim) == 3 # self.axes_dim[i], pos[:, i], repeat_interleave_real=True, use_real=True, freqs_dtype=freqs_dtype # diffusers/src/diffusers/models/embeddings.py:698 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].pos_embed.axes_dim[0], accessed_by=TupleGetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- EQUALS_MATCH: L['self'].pos_embed.axes_dim[0] == 16 # cos, sin = get_1d_rotary_pos_embed( # diffusers/src/diffusers/models/embeddings.py:697 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].pos_embed.axes_dim[1], accessed_by=TupleGetItemGuardAccessor(1) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- EQUALS_MATCH: L['self'].pos_embed.axes_dim[1] == 56 # cos, sin = get_1d_rotary_pos_embed( # diffusers/src/diffusers/models/embeddings.py:697 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].pos_embed.axes_dim[2], accessed_by=TupleGetItemGuardAccessor(2) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- EQUALS_MATCH: L['self'].pos_embed.axes_dim[2] == 56 # cos, sin = get_1d_rotary_pos_embed( # diffusers/src/diffusers/models/embeddings.py:697 
in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].pos_embed._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].pos_embed._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].pos_embed._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].pos_embed._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- GuardManager: source=L['self'].x_embedder, accessed_by=DictGetItemGuardAccessor(x_embedder) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(L['self'].x_embedder, 140581773423280) # hidden_states = self.x_embedder(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:442 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- GuardManager: source=L['self'].x_embedder.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].x_embedder.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].x_embedder.training, 140591004393440) # hidden_states = self.x_embedder(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:442 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- GuardManager: source=L['self'].time_text_embed, accessed_by=DictGetItemGuardAccessor(time_text_embed) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed, 140581773422416) # else self.time_text_embed(timestep, guidance, pooled_projections) # diffusers/src/diffusers/models/transformers/transformer_flux.py:452 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- GuardManager: source=L['self'].time_text_embed.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].time_text_embed.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].time_text_embed.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.training, 140591004393440) # else self.time_text_embed(timestep, guidance, pooled_projections) # 
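
The guards on pos_embed.axes_dim above illustrate how Dynamo treats plain-Python container attributes: it checks the tuple's type (TYPE_MATCH), its length (LENGTH_CHECK), and every element (EQUALS_MATCH 16, 56, 56), so the compiled graph has those ints baked in as constants. A small sketch of that specialization on a toy module (illustrative names):

    import torch
    import torch.nn as nn

    class Rope(nn.Module):
        def __init__(self, axes_dim=(16, 56, 56)):
            super().__init__()
            self.axes_dim = axes_dim  # plain tuple: guarded by type, len, and items

        def forward(self, x):
            # using the ints in Python-level math specializes the graph on them
            return x[..., : sum(self.axes_dim)]

    base = Rope()
    compiled = torch.compile(base)
    x = torch.randn(2, 256)
    compiled(x)                   # compile: axes_dim == (16, 56, 56) becomes a guard
    base.axes_dim = (16, 56, 40)  # change one element of the config tuple...
    compiled(x)                   # ...EQUALS_MATCH fails and the frame recompiles
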
diffusers/src/diffusers/models/transformers/transformer_flux.py:452 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].time_text_embed._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].time_text_embed.time_proj, accessed_by=DictGetItemGuardAccessor(time_proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.time_proj, 140581773415216) # timesteps_proj = self.time_proj(timestep) # diffusers/src/diffusers/models/embeddings.py:1059 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.time_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].time_text_embed.time_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].time_text_embed.time_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.time_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.time_proj.training, 140591004393440) # timesteps_proj = self.time_proj(timestep) # diffusers/src/diffusers/models/embeddings.py:1059 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.time_proj.scale, accessed_by=DictGetItemGuardAccessor(scale) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- EQUALS_MATCH: L['self'].time_text_embed.time_proj.scale == 1 # scale=self.scale, # diffusers/src/diffusers/models/embeddings.py:769 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.time_proj.num_channels, accessed_by=DictGetItemGuardAccessor(num_channels) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- EQUALS_MATCH: L['self'].time_text_embed.time_proj.num_channels == 256 # self.num_channels, # diffusers/src/diffusers/models/embeddings.py:766 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.time_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.time_proj._backward_hooks, 
accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.time_proj.flip_sin_to_cos, accessed_by=DictGetItemGuardAccessor(flip_sin_to_cos) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.time_proj.flip_sin_to_cos, 140591004393408) # flip_sin_to_cos=self.flip_sin_to_cos, # diffusers/src/diffusers/models/embeddings.py:767 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.time_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.time_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.time_proj.downscale_freq_shift, accessed_by=DictGetItemGuardAccessor(downscale_freq_shift) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- EQUALS_MATCH: L['self'].time_text_embed.time_proj.downscale_freq_shift == 0 # downscale_freq_shift=self.downscale_freq_shift, # diffusers/src/diffusers/models/embeddings.py:768 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder, accessed_by=DictGetItemGuardAccessor(text_embedder) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.text_embedder, 140581773415120) # pooled_projections = self.text_embedder(pooled_projection) # diffusers/src/diffusers/models/embeddings.py:1067 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].time_text_embed.text_embedder.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.text_embedder.training, 140591004393440) # pooled_projections = self.text_embedder(pooled_projection) # diffusers/src/diffusers/models/embeddings.py:1067 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | 
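
Note also that this [0/1] tree records a GuardManager for _forward_hooks, _backward_hooks, _forward_pre_hooks, and _backward_pre_hooks on each submodule, and the [0/0] tree earlier in this log guarded the module-level global hook dicts as empty (TYPE_MATCH plus DICT_LENGTH on _global_forward_hooks and friends). The practical consequence is that attaching any hook to a compiled model, per-module or global, changes the guarded state; a rough sketch (version-dependent: newer PyTorch may graph-break around hooks instead of simply recompiling):

    import torch
    import torch.nn as nn

    base = nn.Sequential(nn.Linear(8, 8), nn.ReLU())
    compiled = torch.compile(base)
    x = torch.randn(2, 8)
    compiled(x)  # compile: hook dicts are guarded (observed empty)

    # registering a hook afterwards changes the guarded state...
    base[0].register_forward_hook(lambda mod, inp, out: out)
    compiled(x)  # ...so this call no longer hits the original cache entry
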
| | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder.act_1, accessed_by=DictGetItemGuardAccessor(act_1) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.text_embedder.act_1, 140581773423184) # hidden_states = self.act_1(hidden_states) # diffusers/src/diffusers/models/embeddings.py:1511 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder.act_1.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder.act_1.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.text_embedder.act_1.training, 140591004393440) # hidden_states = self.act_1(hidden_states) # diffusers/src/diffusers/models/embeddings.py:1511 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder.linear_1, accessed_by=DictGetItemGuardAccessor(linear_1) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.text_embedder.linear_1, 140581773422944) # hidden_states = self.linear_1(caption) # diffusers/src/diffusers/models/embeddings.py:1510 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder.linear_1.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder.linear_1.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.text_embedder.linear_1.training, 140591004393440) # hidden_states = self.linear_1(caption) # diffusers/src/diffusers/models/embeddings.py:1510 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder.linear_2, accessed_by=DictGetItemGuardAccessor(linear_2) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.text_embedder.linear_2, 140581773423232) # hidden_states = self.linear_2(hidden_states) # diffusers/src/diffusers/models/embeddings.py:1512 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder.linear_2.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder.linear_2.training, accessed_by=DictGetItemGuardAccessor(training) 
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.text_embedder.linear_2.training, 140591004393440) # hidden_states = self.linear_2(hidden_states) # diffusers/src/diffusers/models/embeddings.py:1512 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder, accessed_by=DictGetItemGuardAccessor(guidance_embedder) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.guidance_embedder, 140581773422512) # guidance_emb = self.guidance_embedder(guidance_proj.to(dtype=pooled_projection.dtype)) # (N, D) # diffusers/src/diffusers/models/embeddings.py:1063 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].time_text_embed.guidance_embedder.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.guidance_embedder.training, 140591004393440) # guidance_emb = self.guidance_embedder(guidance_proj.to(dtype=pooled_projection.dtype)) # (N, D) # diffusers/src/diffusers/models/embeddings.py:1063 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.act, accessed_by=DictGetItemGuardAccessor(act) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | 
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.guidance_embedder.act, 140585079194528) # if self.act is not None: # diffusers/src/diffusers/models/embeddings.py:745 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.act.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.act.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.guidance_embedder.act.training, 140591004393440) # if self.act is not None: # diffusers/src/diffusers/models/embeddings.py:745 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.linear_1, accessed_by=DictGetItemGuardAccessor(linear_1) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.guidance_embedder.linear_1, 140581773422224) # sample = self.linear_1(sample) # diffusers/src/diffusers/models/embeddings.py:743 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.linear_1.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.linear_1.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.guidance_embedder.linear_1.training, 140591004393440) # sample = self.linear_1(sample) # diffusers/src/diffusers/models/embeddings.py:743 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.linear_2, accessed_by=DictGetItemGuardAccessor(linear_2) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.guidance_embedder.linear_2, 140581773421840) # sample = self.linear_2(sample) # diffusers/src/diffusers/models/embeddings.py:748 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.linear_2.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.linear_2.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.guidance_embedder.linear_2.training, 140591004393440) # sample = 
self.linear_2(sample) # diffusers/src/diffusers/models/embeddings.py:748 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.post_act, accessed_by=DictGetItemGuardAccessor(post_act) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.guidance_embedder.post_act, 140591004478624) # if self.post_act is not None: # diffusers/src/diffusers/models/embeddings.py:750 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.guidance_embedder.forward.__defaults__[0], 140591004478624) # if condition is not None: # diffusers/src/diffusers/models/embeddings.py:741 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder, accessed_by=DictGetItemGuardAccessor(timestep_embedder) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.timestep_embedder, 140581773415072) # timesteps_emb = self.timestep_embedder(timesteps_proj.to(dtype=pooled_projection.dtype)) # (N, D) # diffusers/src/diffusers/models/embeddings.py:1060 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', 
L['self'].time_text_embed.timestep_embedder.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.timestep_embedder.training, 140591004393440) # timesteps_emb = self.timestep_embedder(timesteps_proj.to(dtype=pooled_projection.dtype)) # (N, D) # diffusers/src/diffusers/models/embeddings.py:1060 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.act, accessed_by=DictGetItemGuardAccessor(act) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.timestep_embedder.act, 140585079194528) # if self.act is not None: # diffusers/src/diffusers/models/embeddings.py:745 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.act.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.act.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.timestep_embedder.act.training, 140591004393440) # if self.act is not None: # diffusers/src/diffusers/models/embeddings.py:745 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.linear_1, accessed_by=DictGetItemGuardAccessor(linear_1) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.timestep_embedder.linear_1, 140581773422992) # sample = self.linear_1(sample) # diffusers/src/diffusers/models/embeddings.py:743 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.linear_1.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.linear_1.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.timestep_embedder.linear_1.training, 140591004393440) # sample = self.linear_1(sample) # 
diffusers/src/diffusers/models/embeddings.py:743 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.linear_2, accessed_by=DictGetItemGuardAccessor(linear_2) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.timestep_embedder.linear_2, 140581773422848) # sample = self.linear_2(sample) # diffusers/src/diffusers/models/embeddings.py:748 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.linear_2.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.linear_2.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.timestep_embedder.linear_2.training, 140591004393440) # sample = self.linear_2(sample) # diffusers/src/diffusers/models/embeddings.py:748 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.post_act, accessed_by=DictGetItemGuardAccessor(post_act) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.timestep_embedder.post_act, 140591004478624) # if self.post_act is not None: # diffusers/src/diffusers/models/embeddings.py:750 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.forward.__defaults__[0], 
accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.timestep_embedder.forward.__defaults__[0], 140591004478624) # if condition is not None: # diffusers/src/diffusers/models/embeddings.py:741 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].time_text_embed._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].time_text_embed._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].time_text_embed._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].time_text_embed._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- GuardManager: source=L['self'].context_embedder, accessed_by=DictGetItemGuardAccessor(context_embedder) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(L['self'].context_embedder, 140581773423136) # encoder_hidden_states = self.context_embedder(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:454 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- GuardManager: source=L['self'].context_embedder.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].context_embedder.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].context_embedder.training, 140591004393440) # encoder_hidden_states = self.context_embedder(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:454 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- GuardManager: source=L['self'].transformer_blocks, accessed_by=DictGetItemGuardAccessor(transformer_blocks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks, 140581773423376) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- GuardManager: source=L['self'].transformer_blocks.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].transformer_blocks.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks.training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- GuardManager: source=L['self'].transformer_blocks[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0], 140581773423328) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff, accessed_by=DictGetItemGuardAccessor(ff) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff, 140581773424768) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # 
diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net, 140581773425008) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[0].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0], 140581773424960) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj, 140533121654288) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].ff.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj.lora_A, 140533121648528) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | 
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj.lora_A['default_0'], 140533121100880) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj.lora_A['default_0'].weight, 140537310066576) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj.lora_B, 140533121662256) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[0].ff.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj.lora_B['default_0'], 140533121102560) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj.base_layer, 140581773425056) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.lora_dropout, 
accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj.lora_dropout, 140533121650400) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj.lora_dropout['default_0'], 140533121647472) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].ff.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].ff.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 
in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].ff.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].ff.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].ff.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].ff.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[0].ff.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[0].ff.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[1], 140581773425104) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[2], 140533121104864) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].ff.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[2].lora_A, 140533121104000) # if active_adapter not in 
self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[2].lora_A['default_0'], 140533121097040) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[2].lora_A['default_0'].weight, 140537310060576) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[2].lora_B, 
140533121097328) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[2].lora_B['default_0'], 140533121094544) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[2].base_layer, 140581773425152) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[2].base_layer.training, 
140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[2].lora_dropout, 140533121095840) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[2].lora_dropout['default_0'], 140533121095888) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].ff.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | 
| | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].ff.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].ff.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].ff.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].ff.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].ff.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[0].ff.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[0].ff.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn, 140581773423952) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_k, 140533121598512) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_k.lora_A, 140533121603360) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in 
forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_k.lora_A['default_0'], 140533121603936) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_k.lora_A['default_0'].weight, 140537310262944) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_k.lora_B, 140533121602304) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 
in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_k.lora_B['default_0'], 140533121605856) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_k.base_layer, 140581773424144) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_k.lora_dropout, 140533121602928) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_k.lora_dropout['default_0'], 140533121603504) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # 
peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[0].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_q, 140533121125680) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q._modules, 
accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_q.lora_A, 140533121124960) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_q.lora_A['default_0'], 140533121599424) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_q.lora_A['default_0'].weight, 140537310263184) # x = 
x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_q.lora_B, 140533121125008) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_q.lora_B['default_0'], 140533121607296) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_q.base_layer, 140581773424240) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.base_layer.__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_q.lora_dropout, 140533121125392) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_q.lora_dropout['default_0'], 140533121134224) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[0].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.to_q.merged_adapters, 140591004458752) # return 
bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[0].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_v, 140533121608448) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[0].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_v.lora_A, 140533121602400) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_v.lora_A['default_0'], 140533121654528) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[0].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_v.lora_A['default_0'].weight, 140537310250144) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_v.lora_B, 140533121597792) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_v.lora_B['default_0'], 140533121655872) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[0].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_v.base_layer, 140581773424336) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_v.lora_dropout, 140533121599184) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_v.lora_dropout['default_0'], 140533121597696) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[0].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[0].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.norm_k, 140581773424192) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | 
| | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.norm_k.weight, 140581772779872) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_q, 
accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.norm_q, 140581773424096) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.norm_q.weight, 140581906594960) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_q._forward_pre_hooks, 
accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out, 140581773424528) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[0], 140533121653184) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].attn.to_out[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[0].training, 140591004393408) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0]._modules, 
accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[0].lora_A, 140533121652128) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[0].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[0].lora_A['default_0'], 140533121653664) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[0].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[0].lora_A['default_0'].weight, 140537310057216) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[0].lora_B, 140533121651744) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[0].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[0].lora_B['default_0'], 140533121649488) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[0].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[0].base_layer, 140581773424576) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[0].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[0].lora_dropout, 140533121651408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[0].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[0].lora_dropout['default_0'], 140533121653760) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[0].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.to_out[0].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].attn.to_out[0].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].attn.to_out[0].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.to_out[0].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].attn.to_out[0].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[0].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] 
[0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.to_out[0].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[0].attn.to_out[0].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[0]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[1], accessed_by=GetItemGuardAccessor(1) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[1], 140581773424624) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[1].__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_k_proj, 140533121657072) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].attn.add_k_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_k_proj.training, 140591004393408) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_k_proj.lora_A, 140533121650976) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[0].attn.add_k_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_k_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_k_proj.lora_A['default_0'], 140533121657888) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_k_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_k_proj.lora_A['default_0'].weight, 140537310256384) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_k_proj.lora_B, 140533121657648) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_k_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_k_proj.lora_B['default_0'], 140533121657408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_k_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_k_proj.base_layer, 140581773424384) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_k_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.lora_dropout, 
accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_k_proj.lora_dropout, 140533121650880) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_k_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_k_proj.lora_dropout['default_0'], 140533121655920) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_k_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.add_k_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].attn.add_k_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].attn.add_k_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.add_k_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].attn.add_k_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_k_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.add_k_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[0].attn.add_k_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] 
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_k_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_q_proj, 140533121648240) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].attn.add_q_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_q_proj.training, 140591004393408) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | 
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_q_proj.lora_A, 140533121647760) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_q_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_q_proj.lora_A['default_0'], 140533121651168) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_q_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | 
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_q_proj.lora_A['default_0'].weight, 140537310064176) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_q_proj.lora_B, 140533121651120) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_q_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_q_proj.lora_B['default_0'], 140533121656976) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_q_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_q_proj.base_layer, 140581773424480) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward 
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_q_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_q_proj.lora_dropout, 140533121655296) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_q_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_q_proj.lora_dropout['default_0'], 140533121656064) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_q_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.add_q_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].attn.add_q_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].attn.add_q_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.add_q_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].attn.add_q_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_q_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.add_q_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[0].attn.add_q_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_q_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_v_proj, 140533121648384) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].attn.add_v_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_v_proj.training, 140591004393408) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_v_proj.lora_A, 140533121659760) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_v_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_v_proj.lora_A['default_0'], 140533121657216) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_v_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_v_proj.lora_A['default_0'].weight, 140537310253024) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_v_proj.lora_B, 140533121659184) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_v_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_v_proj.lora_B['default_0'], 140533121662448) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_v_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_v_proj.base_layer, 140581773424432) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_v_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_v_proj.lora_dropout, 140533121659232) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_v_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_v_proj.lora_dropout['default_0'], 140533121659280) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_v_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.add_v_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].attn.add_v_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].attn.add_v_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.add_v_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].attn.add_v_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_v_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.add_v_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[0].attn.add_v_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_v_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_add_out, 140533121651840) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].attn.to_add_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_add_out.training, 140591004393408) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_add_out.lora_A, 140533121653808) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_add_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_add_out.lora_A['default_0'], 140533121650448) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_add_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_add_out.lora_A['default_0'].weight, 140537310064816) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_add_out.lora_B, 140533121648288) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_add_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_add_out.lora_B['default_0'], 140533121649728) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_add_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_add_out.base_layer, 140581773424672) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_add_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_add_out.lora_dropout, 140533121647904) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_add_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_add_out.lora_dropout['default_0'], 140533121650016) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_add_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.to_add_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].attn.to_add_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].attn.to_add_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.to_add_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].attn.to_add_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_add_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.to_add_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[0].attn.to_add_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_add_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.norm_added_k, 140581773424816) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.norm_added_k.weight, 140581766060672) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.norm_added_q, 140581773424720) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.norm_added_q.weight, 140581765982592) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.processor, 140581773423904) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1, accessed_by=DictGetItemGuardAccessor(norm1)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1, 140581773423472) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.norm, 140581773423664) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.silu, 140581773423568) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.linear, 140533131173008) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].norm1.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.linear.lora_A, 140533129896016) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.linear.lora_A['default_0'], 140533121018576) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.linear.lora_A['default_0'].weight, 140537214709296) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.linear.lora_B, 140533153970016) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.linear.lora_B['default_0'], 140533121011856) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | |
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.linear.base_layer, 140581773423616) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.linear.lora_dropout, 140533129903264) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.linear.lora_dropout['default_0'], 140533129903936) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].norm1.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].norm1.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].norm1.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].norm1.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].norm1.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.use_dora['default_0'], 
accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].norm1.linear._active_adapter, 140591004458752) # for active_adapter in self.active_adapters: # peft/tuners/lora/layer.py:559 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[0].norm1.linear._active_adapter) == 1 # for active_adapter in self.active_adapters: # peft/tuners/lora/layer.py:559 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] 
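The ID_MATCH, TYPE_MATCH, DICT_LENGTH and EQUALS_MATCH guards above pin, attribute by attribute, exactly what the PEFT LoRA forward reads; the source lines quoted in the trailing guard comments (peft/tuners/lora/layer.py:557-568) outline that path. The sketch below paraphrases those quoted lines into a self-contained module. It is a simplified illustration, not the actual PEFT class, and LoraLinearSketch is a hypothetical name:

    import torch
    import torch.nn as nn

    class LoraLinearSketch(nn.Module):
        # Simplified paraphrase of the forward path quoted in the guards above
        # (peft/tuners/lora/layer.py:557-568); not the real PEFT source.
        def __init__(self, base_layer: nn.Linear, r: int = 4):
            super().__init__()
            self.base_layer = base_layer  # pinned by ID_MATCH above
            self.lora_A = nn.ModuleDict({"default_0": nn.Linear(base_layer.in_features, r, bias=False)})
            self.lora_B = nn.ModuleDict({"default_0": nn.Linear(r, base_layer.out_features, bias=False)})
            self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})
            self.scaling = {"default_0": 1.0}     # dict: TYPE_MATCH + DICT_LENGTH + EQUALS_MATCH == 1.0
            self.use_dora = {"default_0": False}  # dict of bools: ID_MATCH against the bool singleton
            self._active_adapter = ["default_0"]  # list: TYPE_MATCH + LENGTH_CHECK == 1

        def forward(self, x, *args, **kwargs):
            result = self.base_layer(x, *args, **kwargs)      # layer.py:557
            for active_adapter in self._active_adapter:       # layer.py:559
                if active_adapter not in self.lora_A.keys():  # layer.py:560
                    continue
                lora_A = self.lora_A[active_adapter]          # layer.py:562
                lora_B = self.lora_B[active_adapter]          # layer.py:563
                dropout = self.lora_dropout[active_adapter]   # layer.py:564
                scaling = self.scaling[active_adapter]        # layer.py:565
                x = x.to(lora_A.weight.dtype)                 # layer.py:566
                if not self.use_dora[active_adapter]:         # layer.py:568
                    result = result + lora_B(lora_A(dropout(x))) * scaling
            return result

Every attribute touched on this path becomes a guard, which is why a single LoRA-wrapped linear contributes this many entries, and why replacing any adapter component (a new lora_B module, a different scaling value) invalidates the compiled graph and forces a recompile.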
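The long TENSOR_ALIASING run that follows records one structural fact over and over: every adapted submodule in the network holds the very same _active_adapter list object as transformer_blocks[0].norm1.linear, so Dynamo asserts identity (is) per layer instead of re-validating the list's contents each time. A toy demonstration of the property being guarded (hypothetical stand-in class, not PEFT code):

    # The shared-list property asserted by the aliasing guards below:
    # identity (`is`), not equality (`==`), across all adapted layers.
    shared_active_adapter = ["default_0"]

    class AdaptedLayer:  # hypothetical stand-in, not a PEFT class
        def __init__(self, shared):
            self._active_adapter = shared  # same object on every layer

    layers = [AdaptedLayer(shared_active_adapter) for _ in range(19)]
    assert all(layer._active_adapter is layers[0]._active_adapter for layer in layers)

For context, dumps in this format come from Dynamo's verbose guard logging (for example TORCH_LOGS="guards", or torch._logging.set_logs(guards=True)), and the [0/1] tag marks these guards as belonging to a recompilation of frame 0, in contrast to the [0/0] compilation earlier in the log.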
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING:
L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].norm1.linear._active_adapter # return 
self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] 
[0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].transformer_blocks[1].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in 
active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- 
TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].transformer_blocks[9].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[0].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[0].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[0].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[0].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[1].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[1].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[1].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[1].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 
in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[2].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[2].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[2].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[2].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[3].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[3].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[3].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[3].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[4].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[4].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | 
| | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[4].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[4].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[5].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[5].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[5].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[5].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[6].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[6].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[6].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[6].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].single_transformer_blocks[7].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[7].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[7].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[7].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[8].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[8].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[8].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[8].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].proj_out._active_adapter # return self._active_adapter # 
peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].transformer_blocks[12].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.to_out[0]._active_adapter # return self._active_adapter # 
peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
[... elided: the raw dump prints every TENSOR_ALIASING guard twice in a row; the verbatim duplicates and the remaining repetitions are omitted here. The elided entries assert the identical aliasing, L['self'].transformer_blocks[0].norm1.linear._active_adapter is <layer>._active_adapter, for every other LoRA-wrapped linear layer enumerated in this part of the dump: transformer_blocks[17]..[18] (attn.to_out[0], ff.net[0].proj), transformer_blocks[1]..[9] (attn.add_q_proj, attn.add_k_proj, attn.add_v_proj, attn.to_add_out), and single_transformer_blocks[0]..[37] (attn.to_q, attn.to_k, attn.to_v, proj_mlp, proj_out), each carrying the same annotation # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter ...]
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[37].proj_out._active_adapter # return self._active_adapter #
peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[3].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[3].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[3].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[3].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[3].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[3].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[4].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[4].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[4].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[4].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[4].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[4].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[5].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[5].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[5].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[5].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[5].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[5].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[6].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[6].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: 
L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[6].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[6].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[6].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[6].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[7].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[7].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[7].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[7].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[7].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[7].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].single_transformer_blocks[8].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[8].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[8].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[8].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[8].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[8].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].attn.to_v._active_adapter # return self._active_adapter # 
peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: 
L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].transformer_blocks[13].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.to_add_out._active_adapter # return self._active_adapter # 
peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: 
L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].transformer_blocks[18].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[10].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[10].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[10].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[10].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[10].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[10].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[11].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[11].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[11].attn.to_q._active_adapter # return self._active_adapter # 
peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[11].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[11].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[11].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[12].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[12].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[12].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[12].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[12].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[12].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[13].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[13].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
[... ~200 near-identical TENSOR_ALIASING guard records trimmed: each asserts that L['self'].transformer_blocks[0].norm1.linear._active_adapter is the _active_adapter of another LoRA-wrapped linear, and each record is emitted twice verbatim; the run covers single_transformer_blocks[13..37].attn.{to_k,to_q,to_v}, transformer_blocks[0..9].ff_context.net[2], single_transformer_blocks[0..9].norm.linear, and transformer_blocks[10..17].ff_context.net[2], all citing peft/tuners/tuners_utils.py:516 in active_adapter ...]
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[10].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[10].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[11].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[11].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[12].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[12].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[13].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[13].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | 
| | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[14].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[14].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[15].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[15].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[16].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[16].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[17].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[17].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[18].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[18].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter 
is L['self'].single_transformer_blocks[19].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[19].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[20].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[20].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[21].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[21].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[22].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[22].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[23].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[23].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[24].norm.linear._active_adapter # return 
self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[24].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[25].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[25].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[26].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[26].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[27].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[27].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[28].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[28].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[29].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[29].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[30].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[30].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[31].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[31].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[32].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[32].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[33].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[33].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[34].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | 
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[34].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[35].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[35].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[36].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[36].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[37].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[37].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: 
L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].transformer_blocks[6].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].norm1_context.linear._active_adapter # 
return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in 
active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | 
| | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: 
L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: 
L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear._active_adapter[0], accessed_by=ListGetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].norm1.linear._active_adapter[0] == 'default_0' # for active_adapter in self.active_adapters: # peft/tuners/lora/layer.py:559 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].norm1.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[0].norm1.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm2, accessed_by=DictGetItemGuardAccessor(norm2) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm2, 140581773424864) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm2.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context, 140581773425200) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.training, 140591004393440) # 
context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net, 140581773425344) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[0].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0], 140581773425296) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].proj, 140581785247584) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].ff_context.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_A, 140533121413136) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | 
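
The cluster of guards above pins down the PEFT LoRA wrapper around ff_context.net[0].proj: ID_MATCH on the lora_A/lora_B ModuleDicts, on their 'default_0' entries, and on the lora_A weight itself. Dynamo emits these because it inlined peft's LoRA Linear.forward; reconstructed from the layer.py lines the guards cite (557-568), the traced path looks roughly like the sketch below (a paraphrase of the cited lines, not verbatim peft code; the active_adapters loop variable is assumed from the quoted conditionals):

    # rough shape of the inlined peft forward that produced the guards above
    result = self.base_layer(x, *args, **kwargs)        # layer.py:557
    for active_adapter in self.active_adapters:
        if active_adapter not in self.lora_A.keys():    # layer.py:560 -> ID_MATCH on lora_A
            continue
        lora_A = self.lora_A[active_adapter]            # layer.py:562 -> ID_MATCH on ['default_0']
        lora_B = self.lora_B[active_adapter]            # layer.py:563
        dropout = self.lora_dropout[active_adapter]     # layer.py:564
        scaling = self.scaling[active_adapter]          # layer.py:565 -> EQUALS_MATCH below
        x = x.to(lora_A.weight.dtype)                   # layer.py:566 -> ID_MATCH on the weight
        if not self.use_dora[active_adapter]:           # layer.py:568
            result = result + lora_B(lora_A(dropout(x))) * scaling
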
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_A['default_0'], 140533121414144) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_A['default_0'].weight, 140537311919968) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_B, 140533121413568) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] 
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_B['default_0'], 140533121415968) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].proj.base_layer, 140581773425392) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_dropout, 140533121414048) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_dropout['default_0'], 140533121414576) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | 
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].ff_context.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].ff_context.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].ff_context.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].ff_context.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].ff_context.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: 
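
The TYPE_MATCH/DICT_LENGTH/EQUALS_MATCH trio above shows how plain Python state gets specialized: proj.scaling is an ordinary dict of floats, so the value 1.0 is baked into the compiled graph and only an exact-equality guard keeps it honest. Any later change to the adapter scale fails EQUALS_MATCH and forces a recompile. A minimal standalone reproduction (hypothetical module, not taken from this trace):

    import torch

    class Scaled(torch.nn.Module):
        def __init__(self):
            super().__init__()
            self.lin = torch.nn.Linear(8, 8)
            self.scaling = {"default_0": 1.0}  # guarded like proj.scaling above

        def forward(self, x):
            # the float read here is constant-folded; an EQUALS_MATCH guard pins it
            return self.lin(x) * self.scaling["default_0"]

    compiled = torch.compile(Scaled())
    x = torch.randn(2, 8)
    compiled(x)                           # first call: compiles with 1.0 baked in
    compiled.scaling["default_0"] = 0.5   # invalidates the EQUALS_MATCH guard ...
    compiled(x)                           # ... so this call triggers a recompile
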
___check_type_id(L['self'].transformer_blocks[0].ff_context.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[0].ff_context.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[1], 140581773425488) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[2], 140533121416496) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].ff_context.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[2].training, 140591004393408) # for 
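
The recurring "DICT_CONTAINS: not ___dict_contains('forward', ...)" guards come from nn.Module._call_impl, which resolves self.forward dynamically; the compiled code is only valid while no instance-level forward shadows the class method. A (hypothetical) patch like the one below is exactly what this guard protects against:

    import types
    import torch

    lin = torch.nn.Linear(4, 4)

    def patched_forward(self, x):
        return torch.zeros_like(x)

    # this lands in lin.__dict__['forward'], so the DICT_CONTAINS guard would
    # fail and Dynamo would recompile rather than run stale compiled code
    lin.forward = types.MethodType(patched_forward, lin)
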
module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[2].lora_A, 140533121416352) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[2].lora_A['default_0'], 140533121413520) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[2].lora_A['default_0'].weight, 140537311913968) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[2].lora_B, 140533121403104) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[2].lora_B['default_0'], 140533121412656) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[2].base_layer, 140581773425536) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[2].lora_dropout, 140533121414768) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[2].lora_dropout['default_0'], 140533121416016) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[0].ff_context.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].ff_context.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].ff_context.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].ff_context.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].ff_context.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].ff_context.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].ff_context.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[0].ff_context.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: 
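
net[2] is guarded the same way net[0] was: ModuleDict identities, the empty merged_adapters list (LENGTH_CHECK), _disable_adapters, and TENSOR_ALIASING asserting that its _active_adapter is the very same object as norm1.linear's — despite the guard's name, it is checking object identity between the two sources, which holds because PEFT layers share one active-adapter list. Dumps like this one come from the guards logging artifact; to reproduce (model stands in for whatever is being compiled):

    import torch

    # equivalent to running with TORCH_LOGS="guards"; after each compilation
    # Dynamo prints the [__guards] TREE_GUARD_MANAGER tree seen in this log
    torch._logging.set_logs(guards=True)
    compiled = torch.compile(model)
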
source=L['self'].transformer_blocks[0].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context, 140581773423712) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[0].norm1_context.norm, 140581773423856) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.silu, 140581773423760) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.linear, 140533121021360) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].norm1_context.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 
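
norm1_context is an adaptive-norm block, and the guards walk its silu/linear/norm children because the traced computation (per the normalization.py lines the guards cite) is a SiLU-gated projection of the conditioning followed by scale/shift modulation. Pieced together from those cited lines (the chunk into six is the usual AdaLayerNormZero layout and is an assumption here):

    # normalization.py:137 and :139 as quoted in the guards above
    emb = self.linear(self.silu(emb))
    shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = emb.chunk(6, dim=1)
    x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]
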
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.linear.lora_A, 140533121018144) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.linear.lora_A['default_0'], 140533121129616) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.linear.lora_A['default_0'].training, 140591004393408) # lora_A = 
self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.linear.lora_A['default_0'].weight, 140533205648864) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.linear.lora_B, 140533121015984) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.linear.lora_B['default_0'], 140533121129184) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.linear.base_layer, 140581773423808) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.linear.lora_dropout, 140533121011664) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.linear.lora_dropout['default_0'], 140533121018672) # 
dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].norm1_context.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].norm1_context.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].norm1_context.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].norm1_context.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].norm1_context.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].norm1_context.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[0].norm1_context.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].transformer_blocks[0].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm2_context, 140581773424912) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
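The guard clusters above track each LoRA-wrapped Linear layer attribute by attribute, and the trailing comments all point into the same PEFT forward (the peft/tuners/lora/layer.py:557-568 frames cited throughout). A minimal sketch of that forward path, with simplified names and a single 'default_0' adapter assumed (not PEFT's exact code), shows why every attribute read here becomes its own guard source:

```python
# Sketch only: a LoRA-wrapped Linear with the attributes the guard tree reads.
import torch
import torch.nn as nn

class LoraLinearSketch(nn.Module):
    def __init__(self, base: nn.Linear, r: int = 16, lora_alpha: int = 16):
        super().__init__()
        self.base_layer = base
        self.lora_A = nn.ModuleDict({"default_0": nn.Linear(base.in_features, r, bias=False)})
        self.lora_B = nn.ModuleDict({"default_0": nn.Linear(r, base.out_features, bias=False)})
        self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})
        self.scaling = {"default_0": lora_alpha / r}  # EQUALS_MATCH ... == 1.0 when lora_alpha == r
        self.use_dora = {"default_0": False}          # dict guarded by TYPE_MATCH/DICT_LENGTH, value by ID_MATCH
        self.merged_adapters = []                     # LENGTH_CHECK: not merged_adapters
        self._disable_adapters = False                # ID_MATCH on the False singleton
        self._active_adapter = ["default_0"]          # shared across layers -> TENSOR_ALIASING guards

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        result = self.base_layer(x)                       # layer.py:557
        for active_adapter in self._active_adapter:
            if active_adapter not in self.lora_A.keys():  # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]          # layer.py:562
            lora_B = self.lora_B[active_adapter]          # layer.py:563
            dropout = self.lora_dropout[active_adapter]   # layer.py:564
            scaling = self.scaling[active_adapter]        # layer.py:565
            x = x.to(lora_A.weight.dtype)                 # layer.py:566
            if not self.use_dora[active_adapter]:         # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

layer = LoraLinearSketch(nn.Linear(64, 64))
out = layer(torch.randn(2, 64))
```

Each object read in this path is pinned by its own guard, which is why the tree repeats the same lora_A/lora_B/lora_dropout/scaling/use_dora/merged_adapters block for every wrapped layer in the model.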
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- GuardManager: source=L['self'].transformer_blocks[1], accessed_by=GetItemGuardAccessor(1)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1], 140581773423424) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff, accessed_by=DictGetItemGuardAccessor(ff)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff, 140581767528656) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net, 140581767528896) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[1].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0], 140581767528848) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0]._modules, 
accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].proj, 140533121532880) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].ff.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].proj.lora_A, 140533121544688) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[1].ff.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].proj.lora_A['default_0'], 140533121589040) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].proj.lora_A['default_0'].weight, 140537311556880) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].proj.lora_B, 140533121593600) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # 
peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].proj.lora_B['default_0'], 140533121589568) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].proj.base_layer, 140581767528944) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].proj.lora_dropout, 140533121534176) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].proj.lora_dropout['default_0'], 140533121536432) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].ff.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].ff.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- 
EQUALS_MATCH: L['self'].transformer_blocks[1].ff.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].ff.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].ff.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].ff.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[1].ff.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
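For reference while reading the rest of the dump, the guard kinds reduce to simple Python-level checks that are re-evaluated on every call of the compiled graph. Rough equivalents follow (a sketch of the semantics only; the real guards run inside Dynamo's GuardManager tree, not as Python functions like these):

```python
# Approximate semantics of the guard kinds in this dump. L is the compiled
# frame's local scope; the ids were recorded at trace time.
def ID_MATCH(obj, recorded_id):            # ___check_obj_id
    return id(obj) == recorded_id          # same object: modules, True/False singletons

def TYPE_MATCH(obj, recorded_type_id):     # ___check_type_id
    return id(type(obj)) == recorded_type_id

def EQUALS_MATCH(value, constant):         # e.g. proj.scaling['default_0'] == 1.0
    return value == constant

def DICT_LENGTH(d, n):                     # e.g. len(proj.use_dora) == 1
    return len(d) == n

def LENGTH_CHECK_FALSY(seq):               # e.g. not proj.merged_adapters
    return not seq

def DICT_CONTAINS(d, key, expected=False): # not ___dict_contains('forward', __dict__)
    return (key in d) is expected

def TENSOR_ALIASING(a, b):                 # two sources must resolve to one object
    return a is b
```

Because ID_MATCH pins the exact objects seen at trace time, replacing any submodule, toggling a training flag, or loading a second adapter (which would change a DICT_LENGTH) fails a guard and forces a recompile.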
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[1], accessed_by=GetItemGuardAccessor(1)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[1], 140581767528992) # for module in self.net: # 
diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[2], 140533121596912) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].ff.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[2].lora_A, 140533121595664) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | 
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[2].lora_A['default_0'], 140533121589808) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[2].lora_A['default_0'].weight, 140537311549040) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[2].lora_B, 140533121595328) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[2].lora_B['default_0'], 140533121592400) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[2].base_layer, 140581767529040) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[2].lora_dropout, 140533121588032) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[2].lora_dropout['default_0'], 140533121597008) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].ff.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].ff.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].scaling['default_0'], 
accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].ff.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].ff.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].ff.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].ff.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[1].ff.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # 
peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn, accessed_by=DictGetItemGuardAccessor(attn)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn, 140581773426064) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
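The paired TENSOR_ALIASING entries above (printed twice per layer in this dump) assert identity, not equality: every wrapped layer's _active_adapter must still be the very same object as the one on transformer_blocks[0].norm1.linear, so Dynamo checks one anchor relation instead of re-comparing contents per layer. A tiny sketch of the relation being re-checked, using hypothetical stand-in classes rather than PEFT's:

```python
# Sketch: the `is` relation the TENSOR_ALIASING guards assert. In PEFT the
# shared value holds the active adapter name(s); classes here are stand-ins.
class _LoraLayerStub:
    pass

anchor = _LoraLayerStub()
other = _LoraLayerStub()
shared_active = ["default_0"]
anchor._active_adapter = shared_active
other._active_adapter = shared_active
assert anchor._active_adapter is other._active_adapter  # what the guard re-checks
```

Rebinding any layer's _active_adapter to a new, even equal, value would break the `is` relation and invalidate this compiled graph.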
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_k, 140533120101024) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_k.lora_A, 140533120102944) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[1].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_k.lora_A['default_0'], 140533120098384) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_k.lora_A['default_0'].weight, 140537311660128) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_k.lora_B, 140533120103184) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_k.lora_B['default_0'], 140533120091856) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_k.base_layer, 140581773426208) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[1].attn.to_k.lora_dropout, 140533120100832) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_k.lora_dropout['default_0'], 140533120101120) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[1].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | 
| | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_q, 140533121401232) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_q.lora_A, 140533120098336) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_q.lora_A['default_0'], 140533120101264) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_q.lora_A['default_0'].weight, 140537311907008) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | 
| | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_q.lora_B, 140533120100976) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_q.lora_B['default_0'], 140533120100640) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_q.base_layer, 140581773426304) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[1].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_q.lora_dropout, 140533121403008) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_q.lora_dropout['default_0'], 140533121404352) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[1].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | 
| | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_v, 140533120100160) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_v.lora_A, 140533120099968) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_v.lora_A['default_0'], 140533120096896) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | 
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_v.lora_A['default_0'].weight, 140537311663248) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_v.lora_B, 140533120099056) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_v.lora_B['default_0'], 140533120097520) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_v.base_layer, 140581773426400) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_v.lora_dropout, 140533120100112) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_v.lora_dropout['default_0'], 140533120097952) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- 
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[1].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.norm_k, 140581773426256) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.norm_k.weight, 140581785356144) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.norm_q, 140581773426160) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.norm_q.weight, 140581765888128) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
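
Every entry in this tree is a cheap re-check that Dynamo runs before reusing the compiled graph: ID_MATCH compares CPython object identity, TYPE_MATCH compares the identity of an object's type, EQUALS_MATCH compares a plain value (here attn.norm_k.eps == 1e-06 and attn.norm_q.eps == 1e-06 from the RMSNorm forward), and LENGTH_CHECK asserts emptiness. A rough sketch of what each check reduces to, assuming ordinary CPython id() semantics; the helper names below are illustrative, not Dynamo's real guard internals:

    # Illustrative approximations of the guard kinds in this dump.
    def id_match(obj, expected_id):            # ID_MATCH / ___check_obj_id
        return id(obj) == expected_id

    def type_match(obj, expected_type_id):     # TYPE_MATCH / ___check_type_id
        return id(type(obj)) == expected_type_id

    def equals_match(value, expected):         # EQUALS_MATCH, e.g. norm_q.eps == 1e-06
        return value == expected

    def length_check(container):               # LENGTH_CHECK: not <container>
        return not container                   # holds only while the list stays empty
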
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out, 140581773426592) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[0], 140533121560256) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].attn.to_out[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[0].training, 140591004393408) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[0].lora_A, 140533121558576) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[0].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[0].lora_A['default_0'], 140533122959248) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[0].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[0].lora_A['default_0'].weight, 140537311659008) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[0].lora_B, 140533121563040) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[0].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[0].lora_B['default_0'], 140533122970720) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[0].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[0].base_layer, 140581773426640) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[0].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[0].lora_dropout, 140533121563856) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[0].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[0].lora_dropout['default_0'], 140533121549312) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[0].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.to_out[0].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].attn.to_out[0].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].attn.to_out[0].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.to_out[0].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].attn.to_out[0].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[0].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.to_out[0].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[1].attn.to_out[0].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[0]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[1], 140581767528512) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
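
The to_out[0] subtree above pins every Python-level read performed by PEFT's LoRA linear wrapper: the lora_A/lora_B ModuleDicts and their 'default_0' entries, base_layer, lora_dropout, the scaling and use_dora dicts, and the merged/disabled bookkeeping. A condensed sketch of that guarded code path, reconstructed from the source lines quoted in the guard comments (peft/tuners/lora/layer.py:557-568); `layer` stands in for a peft lora.Linear, and this is a paraphrase, not the verbatim PEFT source:

    # Each attribute read below corresponds to a guard in the dump.
    def lora_forward(layer, x, active_adapters=("default_0",)):
        result = layer.base_layer(x)                       # layer.py:557
        for active_adapter in active_adapters:
            if active_adapter not in layer.lora_A.keys():  # layer.py:560
                continue
            lora_A = layer.lora_A[active_adapter]          # layer.py:562
            lora_B = layer.lora_B[active_adapter]          # layer.py:563
            dropout = layer.lora_dropout[active_adapter]   # layer.py:564
            scaling = layer.scaling[active_adapter]        # layer.py:565 (1.0 here)
            x = x.to(lora_A.weight.dtype)                  # layer.py:566
            if not layer.use_dora[active_adapter]:         # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Since the guards fix scaling['default_0'] == 1.0 and use_dora['default_0'] to False, the compiled graph bakes in exactly the plain result + lora_B(lora_A(dropout(x))) * scaling branch; mutating any of these dicts invalidates the guards and forces a recompile.
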
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_k_proj, 140533120099488) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].attn.add_k_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_k_proj.training, 140591004393408) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_k_proj.lora_A, 140533120097904) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_k_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_k_proj.lora_A['default_0'], 140533120096608) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_k_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_k_proj.lora_A['default_0'].weight, 140537311669408) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_k_proj.lora_B, 140533120098000) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_k_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_k_proj.lora_B['default_0'], 140533120098960) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_k_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_k_proj.base_layer, 140581773426448) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_k_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_k_proj.lora_dropout, 140533120097376) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_k_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_k_proj.lora_dropout['default_0'], 140533120097280) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_k_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.add_k_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].attn.add_k_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].attn.add_k_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.add_k_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].attn.add_k_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_k_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.add_k_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[1].attn.add_k_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_k_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_q_proj, 140533121728080) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].attn.add_q_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_q_proj.training, 140591004393408) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_q_proj.lora_A, 140533121560352) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
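
The paired TENSOR_ALIASING entries record an identity invariant rather than a value: every LoRA layer in the model shares the single _active_adapter object first guarded on transformer_blocks[0].norm1.linear, so Dynamo checks that object once and elsewhere only re-asserts the aliasing. A toy illustration of the invariant being guarded, with made-up class and variable names:

    # One list shared by every LoRA layer; the guard re-checks identity, not contents.
    shared_active = ["default_0"]

    class ToyLoraLayer:
        def __init__(self, active_adapter):
            self._active_adapter = active_adapter

    first = ToyLoraLayer(shared_active)   # plays transformer_blocks[0].norm1.linear
    other = ToyLoraLayer(shared_active)   # plays transformer_blocks[1].attn.add_k_proj
    assert first._active_adapter is other._active_adapter
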
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_q_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_q_proj.lora_A['default_0'], 140533121559392) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_q_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_q_proj.lora_A['default_0'].weight, 140537311668848) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_q_proj.lora_B, 140533121558288) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_q_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_q_proj.lora_B['default_0'], 140533121561312) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_q_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_q_proj.base_layer, 140581773426544) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_q_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_q_proj.lora_dropout, 140533121551376) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_q_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_q_proj.lora_dropout['default_0'], 140533121719152) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_q_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.add_q_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].attn.add_q_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].attn.add_q_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.add_q_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].attn.add_q_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_q_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.add_q_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[1].attn.add_q_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_q_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
source=L['self'].transformer_blocks[1].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_v_proj.training, 140591004393408) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_v_proj.lora_A, 140533120098720) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_v_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_v_proj.lora_A['default_0'], 140533121727888) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_v_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_v_proj.lora_A['default_0'].weight, 140537311673328) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_v_proj.lora_B, 140533120094832) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_v_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_v_proj.lora_B['default_0'], 140533121724624) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_v_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # 
peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_v_proj.base_layer, 140581773426496) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_v_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_v_proj.lora_dropout, 140533120096128) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_v_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_v_proj.lora_dropout['default_0'], 140533120099152) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[1].attn.add_v_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_v_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.add_v_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].attn.add_v_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].attn.add_v_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.add_v_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].attn.add_v_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_v_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.add_v_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[1].attn.add_v_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_v_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out, 
accessed_by=DictGetItemGuardAccessor(to_add_out) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_add_out, 140533122960976) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].attn.to_add_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_add_out.training, 140591004393408) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_add_out.lora_A, 140533122963664) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_add_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[1].attn.to_add_out.lora_A['default_0'], 140533121537536) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_add_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_add_out.lora_A['default_0'].weight, 140537311559520) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_add_out.lora_B, 140533121538304) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_add_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_add_out.lora_B['default_0'], 140533121534896) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_add_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_add_out.base_layer, 140581767528560) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_add_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_add_out.lora_dropout, 140533122969808) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.lora_dropout.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_add_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_add_out.lora_dropout['default_0'], 140533122960064) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_add_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.to_add_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].attn.to_add_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].attn.to_add_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | 
| +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.to_add_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].attn.to_add_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_add_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.to_add_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[1].attn.to_add_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_add_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.norm_added_k, 140581767528704) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[1].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.norm_added_k.weight, 140581785356064) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.norm_added_q, 140581767528608) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: 
L['self'].transformer_blocks[1].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.norm_added_q.weight, 140581774377824) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.processor, 140581773426016) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[1].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1, accessed_by=DictGetItemGuardAccessor(norm1) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1, 140581773425584) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.norm, 140581773425728) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.silu, 140581773425632) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.linear, 140533121411744) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].norm1.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.linear, 140533121411744) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].norm1.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.linear.lora_A, 140533121412704) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.linear.lora_A['default_0'], 140533121414912) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.linear.lora_A['default_0'].weight, 140537311908928) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.linear.lora_B, 140533121402864) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.linear.lora_B['default_0'], 140533121406128) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.linear.base_layer, 140581773425680) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.linear.lora_dropout, 140533121411888) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.linear.lora_dropout['default_0'], 140533121412032) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].norm1.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].norm1.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].norm1.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].norm1.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].norm1.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
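Each `___check_obj_id`, `TYPE_MATCH`, and `EQUALS_MATCH` above corresponds to an attribute read inside peft's LoRA `Linear.forward`. The statements quoted in the guard comments (peft/tuners/lora/layer.py:557 to :568) assemble into roughly the following; this is a paraphrase for orientation, not the verbatim peft source, and the final accumulation line is inferred rather than quoted in the log:

```python
def lora_linear_forward_sketch(self, x, *args, **kwargs):
    # layer.py:557: run the frozen base projection first.
    result = self.base_layer(x, *args, **kwargs)
    for active_adapter in self.active_adapters:  # iteration inferred, not quoted
        # layer.py:560: skip adapters this layer does not carry.
        if active_adapter not in self.lora_A.keys():
            continue
        lora_A = self.lora_A[active_adapter]          # layer.py:562
        lora_B = self.lora_B[active_adapter]          # layer.py:563
        dropout = self.lora_dropout[active_adapter]   # layer.py:564
        scaling = self.scaling[active_adapter]        # layer.py:565 (guarded: == 1.0)
        x = x.to(lora_A.weight.dtype)                 # layer.py:566
        if not self.use_dora[active_adapter]:         # layer.py:568 (guarded: False)
            # Inferred low-rank update; hedged, not quoted in the log.
            result = result + lora_B(lora_A(dropout(x))) * scaling
    return result
```

Because Dynamo traces this Python directly, every dict lookup becomes a guard on the compiled graph: `scaling['default_0'] == 1.0`, `use_dora['default_0']` being False, and (below) `merged_adapters` being empty. Changing the LoRA scale, enabling DoRA, or merging adapter weights would flip one of these checks and force a recompile.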
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].norm1.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[1].norm1.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm2, accessed_by=DictGetItemGuardAccessor(norm2)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm2, 140581767528752) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm2.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context, 140581767529088) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
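For context, the statements quoted by the `norm2` and `ff_context` guards (and by the `norm1_context` guards further down) are call sites in `FluxTransformerBlock.forward`. A skeleton of just those quoted lines; the argument lists and everything elided between them are assumptions:

```python
# Skeleton of the quoted call sites only; attention logic omitted.
def flux_block_forward_sketch(self, hidden_states, encoder_hidden_states, temb):
    # transformer_flux.py:167
    norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context(
        encoder_hidden_states, emb=temb  # keyword form assumed
    )
    ...
    # transformer_flux.py:182
    norm_hidden_states = self.norm2(hidden_states)
    ...
    # transformer_flux.py:198
    context_ff_output = self.ff_context(norm_encoder_hidden_states)
    ...
```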
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net, 140581767529232) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[1].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0], 140581767529184) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].proj, 140533121596864) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].ff_context.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_A, 140533121596384) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_A['default_0'], 140533121584144) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_A['default_0'].weight, 140537311554800) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_B, 140533121582464) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_B['default_0'], 140533121589232) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].proj.base_layer, 140581767529280) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_dropout, 140533121594800) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_dropout['default_0'], 140533121593216) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].ff_context.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].ff_context.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].ff_context.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].ff_context.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].ff_context.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].ff_context.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[1].ff_context.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
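The `EQUALS_MATCH` on `net[0].approximate == 'tanh'` bakes the tanh-approximate GELU constant into the compiled graph. The two statements quoted from activations.py suggest roughly the following module; the names and dimensions not quoted in the log are assumptions:

```python
import torch
import torch.nn as nn
import torch.nn.functional as F

class GELUSketch(nn.Module):
    # Hypothetical reconstruction around activations.py:83 and :88 as quoted above.
    def __init__(self, dim_in: int, dim_out: int, approximate: str = "tanh"):
        super().__init__()
        self.proj = nn.Linear(dim_in, dim_out)  # wrapped by peft's lora.Linear in this trace
        self.approximate = approximate

    def gelu(self, gate: torch.Tensor) -> torch.Tensor:
        # activations.py:83
        return F.gelu(gate, approximate=self.approximate)

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        # activations.py:88
        hidden_states = self.proj(hidden_states)
        return self.gelu(hidden_states)
```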
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[1], 140581767529376) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[2], 140533121590864) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].ff_context.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[2].lora_A, 140533121594080) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[2].lora_A['default_0'], 140533120047072) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[2].lora_A['default_0'].weight, 140537311552400) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[2].lora_B, 140533121582752) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[2].lora_B['default_0'], 140533120048704) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[2].base_layer, 140581767529424) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[2].lora_dropout, 140533121582944) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[2].lora_dropout['default_0'], 140533121595808) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].ff_context.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].ff_context.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].ff_context.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].ff_context.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].ff_context.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].ff_context.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[1].ff_context.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
source=L['self'].transformer_blocks[1].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context, 140581773425776) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[1].norm1_context.norm, 140581773425968) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.silu, 140581773425872) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.linear, 140533121408480) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].norm1_context.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.linear.lora_A, 140533121407616) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.linear.lora_A['default_0'], 140533121410688) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.linear.lora_A['default_0'].training, 140591004393408) # lora_A = 
self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.linear.lora_A['default_0'].weight, 140537311919408) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.linear.lora_B, 140533121411456) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.linear.lora_B['default_0'], 140533121409200) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.linear.base_layer, 140581773425920) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.linear.lora_dropout, 140533121413760) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.linear.lora_dropout['default_0'], 140533121407136) # 
dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].norm1_context.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].norm1_context.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].norm1_context.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].norm1_context.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].norm1_context.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].norm1_context.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[1].norm1_context.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].transformer_blocks[1].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm2_context, 140581767528800) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- 
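The subtree for transformer_blocks[1] above shows the guard family Dynamo installs for every PEFT LoraLayer it traces: ID_MATCH pins the wrapper and each submodule (base_layer, lora_A['default_0'], lora_B['default_0'], lora_dropout['default_0']) to a specific object id, TYPE_MATCH and DICT_LENGTH pin the scaling and use_dora dicts, EQUALS_MATCH pins the concrete scaling value (1.0 here), LENGTH_CHECK asserts merged_adapters is empty, and the TENSOR_ALIASING checks assert that each layer's _active_adapter is the very same object as transformer_blocks[0].norm1.linear._active_adapter, so one shared list covers all layers. Because scaling['default_0'] is guarded by value, the LoRA scale is effectively baked into the compiled graph; changing it fails the guard and forces a recompile. A minimal sketch of that effect, assuming a toy PEFT model rather than the Flux transformer in this trace (with these LoraConfig values the default adapter is named 'default' and scaling = lora_alpha / r = 1.0):

    import torch
    from torch import nn
    from peft import LoraConfig, get_peft_model

    base = nn.Sequential(nn.Linear(16, 16))
    model = get_peft_model(base, LoraConfig(target_modules=["0"], r=4, lora_alpha=4))

    # Print guard trees like the dump above (equivalent to TORCH_LOGS="guards,recompiles").
    torch._logging.set_logs(guards=True, recompiles=True)

    compiled = torch.compile(model)
    compiled(torch.randn(2, 16))      # first call compiles; guards include EQUALS_MATCH scaling == 1.0

    layer = model.base_model.model[0]  # the injected lora.Linear wrapper
    layer.scaling["default"] = 2.0     # breaks the EQUALS_MATCH guard on the scaling value
    compiled(torch.randn(2, 16))       # guard check fails, so this call recompiles

The dump then continues with the guards for transformer_blocks[2]: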
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- GuardManager: source=L['self'].transformer_blocks[2], accessed_by=GetItemGuardAccessor(2)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2], 140581773425248) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff, accessed_by=DictGetItemGuardAccessor(ff)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff, 140581767530720) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net, accessed_by=DictGetItemGuardAccessor(net)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net, 140581767530960) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[2].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0], accessed_by=GetItemGuardAccessor(0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0], 140581767530912) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].proj, 140533121806544) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].ff.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].proj.lora_A, 140533121806736) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].proj.lora_A['default_0'], 140533121808128) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].proj.lora_A['default_0'].weight, 140537311183072) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].proj.lora_B, 140533121794544) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].proj.lora_B['default_0'], 140533121809232) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].proj.base_layer, 140581767531008) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].proj.lora_dropout, 140533121807408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].proj.lora_dropout['default_0'], 140533121800736) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].ff.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].ff.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].ff.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].ff.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].ff.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].ff.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[2].ff.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
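Two asides before the tree continues with net[1] and net[2]. First, the [0/1] tag on every record denotes frame 0, compile attempt 1, i.e. this guard tree belongs to a recompiled version of a frame that had already been compiled once in this session. Second, this per-layer pattern repeats for every remaining LoRA-wrapped module, which is why an adapter-carrying transformer pays hundreds of object-identity checks per compiled frame. If the adapter does not need to stay dynamic, one common mitigation (sketched under the assumption that this model came from a diffusers Flux pipeline via load_lora_weights; the pipe variable, repo id, and LoRA path below are illustrative) is to fuse the adapter into the base weights before compiling, so Dynamo only ever sees plain Linear modules:

    import torch
    from diffusers import FluxPipeline

    pipe = FluxPipeline.from_pretrained(
        "black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16
    ).to("cuda")
    pipe.load_lora_weights("path/to/lora")  # illustrative path
    pipe.fuse_lora(lora_scale=1.0)          # fold lora_B @ lora_A * scaling into the base weights
    pipe.unload_lora_weights()              # drop the LoraLayer wrappers, keeping the fused weights
    pipe.transformer = torch.compile(pipe.transformer)

After fusing and unloading, the lora_A/lora_B/scaling/use_dora guards above should disappear, since those attributes no longer exist on the modules Dynamo traces.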
diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[2], 140533121796944) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].ff.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[2].lora_A, 140533121807792) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | 
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[2].lora_A['default_0'], 140533121807024) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[2].lora_A['default_0'].weight, 140537311077424) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[2].lora_B, 140533121802752) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[2].lora_B['default_0'], 140533121807984) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[2].base_layer, 140581767531104) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[2].lora_dropout, 140533121799152) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[2].lora_dropout['default_0'], 140533121799440) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].ff.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].ff.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].scaling['default_0'], 
accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].ff.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].ff.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].ff.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].ff.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[2].ff.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # 
peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn, 140581767529952) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_k, 140533119905568) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_k.lora_A, 140533119903984) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[2].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_k.lora_A['default_0'], 140533119899328) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_k.lora_A['default_0'].weight, 140537311397424) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_k.lora_B, 140533119893664) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_k.lora_B['default_0'], 140533119897168) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_k.base_layer, 140581767530096) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[2].attn.to_k.lora_dropout, 140533119907968) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_k.lora_dropout['default_0'], 140533119894048) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[2].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | 
| | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_q, 140533120445952) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_q.lora_A, 140533120447680) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_q.lora_A['default_0'], 140533119902640) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_q.lora_A['default_0'].weight, 140537311401184) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | 
| | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_q.lora_B, 140533120441920) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_q.lora_B['default_0'], 140533119905520) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_q.base_layer, 140581767530192) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[2].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_q.lora_dropout, 140533120446528) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_q.lora_dropout['default_0'], 140533120448976) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[2].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_v, 140533119894720) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_v.lora_A, 140533120317904) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_v.lora_A['default_0'], 140533120317328) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_v.lora_A['default_0'].weight, 140537311403024) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_v.lora_B, 140533120318576) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_v.lora_B['default_0'], 140533120309504) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_v.base_layer, 140581767530288) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_v.lora_dropout, 140533119897408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_v.lora_dropout['default_0'], 140533119895248) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[2].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.norm_k, 140581767530144) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.norm_k.weight, 140581772708016) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.norm_q, 140581767530048) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.norm_q.weight, 140581773245904) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out, 140581767530480) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0], accessed_by=GetItemGuardAccessor(0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[0], 140533120834512) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].attn.to_out[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[0].training, 140591004393408) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[0].lora_A, 140533120839984) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[0].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[0].lora_A['default_0'], 140533121383872) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[0].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[0].lora_A['default_0'].weight, 140537311198192) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[0].lora_B, 140533120835520) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[0].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[0].lora_B['default_0'], 140533121380320) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[0].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[0].base_layer, 140581767530528) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[0].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[0].lora_dropout, 140533120839936) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[0].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[0].lora_dropout['default_0'], 140533120829664) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[0].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.to_out[0].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].attn.to_out[0].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].attn.to_out[0].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.to_out[0].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].attn.to_out[0].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[0].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.to_out[0].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[2].attn.to_out[0].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[0]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[1], accessed_by=GetItemGuardAccessor(1)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[1], 140581767530576) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_k_proj, 140533120304512) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].attn.add_k_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_k_proj.training, 140591004393408) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_k_proj.lora_A, 140533120304704) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_k_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_k_proj.lora_A['default_0'], 140533120318912) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_k_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_k_proj.lora_A['default_0'].weight, 140537311396384) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_k_proj.lora_B, 140533120317472) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_k_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_k_proj.lora_B['default_0'], 140533120319104) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_k_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_k_proj.base_layer, 140581767530336) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_k_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_k_proj.lora_dropout, 140533120314160) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_k_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_k_proj.lora_dropout['default_0'], 140533120304416) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_k_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.add_k_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].attn.add_k_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].attn.add_k_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.add_k_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].attn.add_k_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_k_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.add_k_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[2].attn.add_k_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_k_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards]
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_q_proj, 140533120571696) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].attn.add_q_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_q_proj.training, 140591004393408) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH:
___check_obj_id(L['self'].transformer_blocks[2].attn.add_q_proj.lora_A, 140533120828416) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_q_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_q_proj.lora_A['default_0'], 140533120831824) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_q_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_q_proj.lora_A['default_0'].weight, 140537311192592) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_q_proj.lora_B, 140533120831968) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_q_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_q_proj.lora_B['default_0'], 140533120836576) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_q_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_q_proj.base_layer, 140581767530432) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) 
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_q_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_q_proj.lora_dropout, 140533120571840) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_q_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_q_proj.lora_dropout['default_0'], 140533120572080) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_q_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: 
___check_type_id(L['self'].transformer_blocks[2].attn.add_q_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].attn.add_q_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].attn.add_q_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.add_q_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].attn.add_q_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_q_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.add_q_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | 
| | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[2].attn.add_q_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_q_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_v_proj, 140533120305232) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].attn.add_v_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager:
source=L['self'].transformer_blocks[2].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_v_proj.training, 140591004393408) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_v_proj.lora_A, 140533120316848) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_v_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_v_proj.lora_A['default_0'], 140533120572464) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_v_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_v_proj.lora_A['default_0'].weight, 140537311192752) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_v_proj.lora_B, 140533120307488) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_v_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_v_proj.lora_B['default_0'], 140533120571168) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_v_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # 
peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_v_proj.base_layer, 140581767530384) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_v_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_v_proj.lora_dropout, 140533120311136) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_v_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_v_proj.lora_dropout['default_0'], 140533120303744) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[2].attn.add_v_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_v_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.add_v_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].attn.add_v_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].attn.add_v_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.add_v_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].attn.add_v_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_v_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.add_v_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[2].attn.add_v_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_v_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out,
accessed_by=DictGetItemGuardAccessor(to_add_out) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_add_out, 140533121382240) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].attn.to_add_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_add_out.training, 140591004393408) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_add_out.lora_A, 140533121383488) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_add_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[2].attn.to_add_out.lora_A['default_0'], 140533121808224) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_add_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_add_out.lora_A['default_0'].weight, 140537311185872) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_add_out.lora_B, 140533121374512) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_add_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_add_out.lora_B['default_0'], 140533121796608) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_add_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_add_out.base_layer, 140581767530624) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_add_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_add_out.lora_dropout, 140533121382192) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.lora_dropout.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_add_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_add_out.lora_dropout['default_0'], 140533121377488) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_add_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.to_add_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].attn.to_add_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].attn.to_add_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | 
| +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.to_add_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].attn.to_add_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_add_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.to_add_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[2].attn.to_add_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_add_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.norm_added_k, 140581767530768) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.norm_added_k.weight, 140581766000416) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.norm_added_q, 140581767530672) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.norm_added_q.weight, 140581766000496) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.processor, 140581767529904) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
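The eps == 1e-06 and weight guards on norm_added_q/norm_added_k come from diffusers' RMSNorm. A minimal sketch of the cited forward (diffusers/src/diffusers/models/normalization.py:428-430); the float32 variance upcast is an assumption about the surrounding lines, not something visible in this log:

    # RMSNorm core, matching the guarded lines normalization.py:428-430.
    variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)
    hidden_states = hidden_states * torch.rsqrt(variance + self.eps)   # :428
    if self.weight is not None:                                        # :430
        hidden_states = hidden_states.to(self.weight.dtype) * self.weight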
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1, accessed_by=DictGetItemGuardAccessor(norm1)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1, 140581767529472) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.norm, 140581767529616) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.silu, 140581767529520) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.linear, 140533120055232) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].norm1.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.linear.lora_A, 140533120046784) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.linear.lora_A['default_0'], 140533120050768) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.linear.lora_A['default_0'].weight, 140537311396144) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.linear.lora_B, 140533120053648) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.linear.lora_B['default_0'], 140533120049568) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.linear.base_layer, 140581767529568) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.linear.lora_dropout, 140533120055040) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.linear.lora_dropout['default_0'], 140533120054992) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].norm1.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].norm1.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].norm1.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].norm1.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].norm1.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
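The norm1 subtree above traces diffusers' AdaLayerNormZero (silu, then the LoRA-wrapped linear, then chunk and modulate). A sketch of the cited lines (normalization.py:135-139); the six-way chunk is an assumption consistent with the five extra values unpacked at transformer_flux.py:165, not something the log states:

    # Sketch of the guarded AdaLayerNormZero path (normalization.py:135-139).
    emb = self.linear(self.silu(emb))                                  # :137
    shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = emb.chunk(6, dim=1)
    x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]   # :139
    return x, gate_msa, shift_mlp, scale_mlp, gate_mlp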
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].norm1.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[2].norm1.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm2, accessed_by=DictGetItemGuardAccessor(norm2)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm2, 140581767530816) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm2.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context, 140581767531152) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net, 140581767531296) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[2].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0], 140581767531248) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
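The ff_context guards pin self.net to a three-module list (LENGTH_CHECK == 3) that is simply iterated at attention.py:1200. The [GELU(proj), Dropout, Linear] layout in the sketch below is inferred from the net[0].proj, net[1], and net[2] guards rather than stated in the log; the tanh-approximate GELU is confirmed by a later guard on net[0].approximate:

    # Sketch of the guarded FeedForward loop (attention.py:1200); the layout
    # of self.net is inferred from the guards, not printed by dynamo.
    def forward(self, hidden_states):
        for module in self.net:   # net = [GELU(proj, approximate='tanh'), Dropout, Linear]
            hidden_states = module(hidden_states)
        return hidden_states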
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].proj, 140533121800400) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].ff_context.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_A, 140533120704256) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_A['default_0'], 140533120701520) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_A['default_0'].weight, 140537311077504) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_B, 140533120704304) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_B['default_0'], 140533120701952) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].proj.base_layer, 140581767531344) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_dropout, 140533121796752) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_dropout['default_0'], 140533121808416) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].ff_context.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].ff_context.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].ff_context.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].ff_context.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].ff_context.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
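Dumps like this one come from dynamo's 'guards' logging artifact. To reproduce it, or to watch only the cheaper recompile reasons instead of the full tree, something like the following should work on a 2.x PyTorch (TORCH_LOGS="guards,recompiles" from the shell is equivalent; treat the exact flag names as an assumption for your version):

    import torch
    # Print the guard tree for each compiled frame, plus the reason whenever
    # a guard failure forces a recompile.
    torch._logging.set_logs(guards=True, recompiles=True)

Note that the many ID_MATCH guards pin specific module objects by id; replacing a module (for example rebuilding LoRA layers instead of switching adapters in place) invalidates them and triggers exactly such recompiles.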
___check_type_id(L['self'].transformer_blocks[2].ff_context.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[2].ff_context.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[1], 140581767531440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[2], 140533120705216) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].ff_context.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[2].training, 140591004393408) # for 
module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[2].lora_A, 140533121327616) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[2].lora_A['default_0'], 140533121322816) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[2].lora_A['default_0'].weight, 140537311079424) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[2].lora_B, 140533121321040) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[2].lora_B['default_0'], 140533121326272) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[2].base_layer, 140581767531488) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[2].lora_dropout, 140533121321664) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[2].lora_dropout['default_0'], 140533121324688) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[2].ff_context.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].ff_context.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].ff_context.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].ff_context.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].ff_context.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].ff_context.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].ff_context.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[2].ff_context.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[2].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context, 140581767529664) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[2].norm1_context.norm, 140581767529856) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.silu, 140581767529760) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.linear, 140533119963616) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].norm1_context.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.linear.lora_A, 140533119974848) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.linear.lora_A['default_0'], 140533120434768) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.linear.lora_A['default_0'].training, 140591004393408) # lora_A = 
self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.linear.lora_A['default_0'].weight, 140537311411904) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.linear.lora_B, 140533119972112) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.linear.lora_B['default_0'], 140533120437600) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.linear.base_layer, 140581767529808) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.linear.lora_dropout, 140533119971392) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.linear.lora_dropout['default_0'], 140533119974176) # 
dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].norm1_context.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].norm1_context.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].norm1_context.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].norm1_context.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].norm1_context.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].norm1_context.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[2].norm1_context.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].transformer_blocks[2].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm2_context, 140581767530864) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[3], accessed_by=GetItemGuardAccessor(3) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3], 140581767529136) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff, accessed_by=DictGetItemGuardAccessor(ff) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff, 140581767532784) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net, 140581767533024) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[3].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0], 140581767532976) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].proj, 140533121832688) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].ff.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].proj.lora_A, 140533121834896) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].proj.lora_A['default_0'], 140533121835520) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].proj.lora_A['default_0'].weight, 140537312871104) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].proj.lora_B, 140533121839504) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].proj.lora_B['default_0'], 140533121835760) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].proj.base_layer, 140581767533072) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].proj.lora_dropout, 140533121838064) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].proj.lora_dropout['default_0'], 140533121833888) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].ff.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].ff.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].ff.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].ff.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].ff.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].ff.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[3].ff.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
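
The block of guards above pins down the entire PEFT LoRA dispatch path for transformer_blocks[3].ff.net[0].proj, quoting peft/tuners/lora/layer.py:557-568 in the trailing comments: a single 'default_0' adapter, scaling == 1.0 (EQUALS_MATCH), use_dora False, and no merged adapters. A minimal sketch of the forward logic those guards specialize, reconstructed only from the quoted source lines (the class name, constructor arguments, and the nn.Identity dropout stand-in are illustrative assumptions, not PEFT's real API):

    import torch
    from torch import nn

    class LoraLinearSketch(nn.Module):
        # Illustrative stand-in for a PEFT-wrapped Linear; attribute names mirror
        # the guarded ones (lora_A, lora_B, lora_dropout, scaling, use_dora,
        # base_layer); everything else here is an assumption.
        def __init__(self, base_layer: nn.Linear, r: int = 16):
            super().__init__()
            self.base_layer = base_layer
            self.lora_A = nn.ModuleDict({"default_0": nn.Linear(base_layer.in_features, r, bias=False)})
            self.lora_B = nn.ModuleDict({"default_0": nn.Linear(r, base_layer.out_features, bias=False)})
            self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})  # p=0 dropout stand-in
            self.scaling = {"default_0": 1.0}     # dict guarded by TYPE_MATCH/DICT_LENGTH/EQUALS_MATCH
            self.use_dora = {"default_0": False}  # guarded by ID_MATCH against False
            self.active_adapters = ["default_0"]

        def forward(self, x):
            result = self.base_layer(x)                       # layer.py:557
            for active_adapter in self.active_adapters:
                if active_adapter not in self.lora_A.keys():  # layer.py:560
                    continue
                lora_A = self.lora_A[active_adapter]          # layer.py:562
                lora_B = self.lora_B[active_adapter]          # layer.py:563
                dropout = self.lora_dropout[active_adapter]   # layer.py:564
                scaling = self.scaling[active_adapter]        # layer.py:565
                x = x.to(lora_A.weight.dtype)                 # layer.py:566
                if not self.use_dora[active_adapter]:         # layer.py:568
                    # non-DoRA branch: the standard additive LoRA update
                    result = result + lora_B(lora_A(dropout(x))) * scaling
            return result

Every attribute read in this forward shows up above as its own GuardManager node, which is why a single LoRA-wrapped Linear contributes a few dozen guards to the tree.
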
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[1], 140581767533120) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[2], 140533121828272) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].ff.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[2].lora_A, 140533121833024) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
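
Taken together, the LENGTH_CHECK len(net) == 3 and the approximate == 'tanh' EQUALS_MATCH above describe the diffusers FeedForward block being compiled: net[0] is a projection followed by tanh-approximated GELU (activations.py:83/88), net[1] carries only a training-flag guard (consistent with a Dropout, which is an assumption here), and net[2] is the LoRA-wrapped output Linear. A sketch under those assumptions:

    import torch
    import torch.nn.functional as F
    from torch import nn

    class GELUProjSketch(nn.Module):
        # Mirrors the guarded attributes: .proj and .approximate == 'tanh'.
        def __init__(self, dim_in: int, dim_out: int, approximate: str = "tanh"):
            super().__init__()
            self.proj = nn.Linear(dim_in, dim_out)
            self.approximate = approximate

        def forward(self, hidden_states):
            hidden_states = self.proj(hidden_states)                    # activations.py:88
            return F.gelu(hidden_states, approximate=self.approximate)  # activations.py:83

    class FeedForwardSketch(nn.Module):
        # len(self.net) == 3, matching the LENGTH_CHECK above.
        def __init__(self, dim: int, mult: int = 4):
            super().__init__()
            self.net = nn.ModuleList([
                GELUProjSketch(dim, dim * mult),  # net[0]
                nn.Dropout(0.0),                  # net[1] (assumed)
                nn.Linear(dim * mult, dim),       # net[2] (LoRA-wrapped in the guarded model)
            ])

        def forward(self, hidden_states):
            for module in self.net:               # attention.py:1200
                hidden_states = module(hidden_states)
            return hidden_states
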
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[2].lora_A['default_0'], 140533121834320) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[2].lora_A['default_0'].weight, 140537312883424) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[2].lora_B, 140533121841712) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[2].lora_B['default_0'], 140533120646496) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[2].base_layer, 140581767533168) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[2].lora_dropout, 140533121827120) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[2].lora_dropout['default_0'], 140533121828656) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].ff.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].ff.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].ff.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].ff.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].ff.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].ff.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[3].ff.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn, 140581767532016) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
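
Note how the ff.net[0].proj and ff.net[2] subtrees repeat the same LoRA guard shape per wrapped Linear; every ID_MATCH is an id() check on a live Python object, so swapping any adapter submodule, toggling a .training flag, or changing the 'default_0' scaling invalidates this cache entry and forces another compile (the [0/1] compile id on these lines, versus [0/0] at the top of the log, indicates this tree already belongs to a recompilation of the same frame). A dump like this can be reproduced by enabling the guards logging artifact before compiling; a minimal sketch, assuming a recent PyTorch 2.x build (exact output format varies by version):

    import torch

    # Programmatic equivalent of running with TORCH_LOGS="guards" in the environment.
    torch._logging.set_logs(guards=True)

    @torch.compile
    def double(x):
        return x * 2

    # The GUARDS / TREE_GUARD_MANAGER dump is printed after the first compilation.
    double(torch.randn(4))
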
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_k, 140533121789248) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_k.lora_A, 140533121783632) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_k.lora_A['default_0'], 140533121790976) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_k.lora_A['default_0'].weight, 140537313061488) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_k.lora_B, 140533121787712) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_k.lora_B['default_0'], 140533121792080) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_k.base_layer, 140581767532160) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_k.lora_dropout, 140533121784160) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_k.lora_dropout['default_0'], 140533121793088) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[3].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
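
The to_k guards above and the to_q guards just below correspond to the projection calls quoted from diffusers/src/diffusers/models/attention_processor.py:1716-1717. A sketch of that projection step only (the to_v projection, head reshaping, RoPE, and the joint text/image handling of the Flux processor are omitted or assumed):

    import torch
    import torch.nn.functional as F
    from torch import nn

    class AttnProjectionSketch(nn.Module):
        def __init__(self, dim: int):
            super().__init__()
            # LoRA-wrapped Linears in the guarded model; plain Linears here.
            self.to_q = nn.Linear(dim, dim)
            self.to_k = nn.Linear(dim, dim)
            self.to_v = nn.Linear(dim, dim)  # assumed alongside to_q/to_k

        def forward(self, hidden_states):
            query = self.to_q(hidden_states)  # attention_processor.py:1716
            key = self.to_k(hidden_states)    # attention_processor.py:1717
            value = self.to_v(hidden_states)
            return F.scaled_dot_product_attention(query, key, value)
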
| | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_q, 140533119889664) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- 
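Every LoRA-wrapped projection in the block accumulates this same cluster of guards because Dynamo must pin down each Python-level dict lookup and branch in PEFT's forward before it can reuse the compiled graph. A minimal sketch of that hot path, paraphrased from the source lines the guards cite (peft/tuners/lora/layer.py:557-568) rather than copied from PEFT, with comments mapping each access to the guard it produces:

    # Sketch of the PEFT LoRA Linear forward that the guards above trace through
    # (paraphrase of the cited lines, not the verbatim peft implementation).
    import torch
    import torch.nn as nn

    class LoraLinearSketch(nn.Module):
        def __init__(self, base_layer: nn.Linear, r: int = 16, scale: float = 1.0):
            super().__init__()
            self.base_layer = base_layer                     # -> ID_MATCH on base_layer
            self.lora_A = nn.ModuleDict(                     # each entry -> ID_MATCH
                {"default_0": nn.Linear(base_layer.in_features, r, bias=False)})
            self.lora_B = nn.ModuleDict(
                {"default_0": nn.Linear(r, base_layer.out_features, bias=False)})
            self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})
            self.scaling = {"default_0": scale}              # plain dict -> TYPE_MATCH + DICT_LENGTH + EQUALS_MATCH
            self.use_dora = {"default_0": False}             # -> ID_MATCH on the False singleton
            self.merged_adapters = []                        # -> TYPE_MATCH + LENGTH_CHECK (empty)
            self._disable_adapters = False                   # -> ID_MATCH
            self.active_adapters = ["default_0"]

        def forward(self, x: torch.Tensor) -> torch.Tensor:
            result = self.base_layer(x)                      # layer.py:557
            for active_adapter in self.active_adapters:
                if active_adapter not in self.lora_A.keys(): # layer.py:560
                    continue
                lora_A = self.lora_A[active_adapter]         # layer.py:562
                lora_B = self.lora_B[active_adapter]         # layer.py:563
                dropout = self.lora_dropout[active_adapter]  # layer.py:564
                scaling = self.scaling[active_adapter]       # layer.py:565
                x = x.to(lora_A.weight.dtype)                # layer.py:566
                if not self.use_dora[active_adapter]:        # layer.py:568
                    result = result + lora_B(lora_A(dropout(x))) * scaling
            return result

    # Usage: layer = LoraLinearSketch(nn.Linear(16, 16)); layer(torch.randn(2, 16))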
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_q, 140533119889664) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_q.lora_A, 140533119888176) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_q.lora_A['default_0'], 140533121787328) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_q.lora_A['default_0'].weight, 140537311075584) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_q.lora_B, 140533119881360) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_q.lora_B['default_0'], 140533121780080) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_q.base_layer, 140581767532256) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_q.lora_dropout, 140533119882464) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_q.lora_dropout['default_0'], 140533119887648) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- [... to_q's scaling / use_dora / hook / merged_adapters / _disable_adapters / _active_adapter guards repeat the to_k tail above, shape for shape: scaling TYPE_MATCH (dict, 140591004466944) + DICT_LENGTH == 1 + EQUALS_MATCH scaling['default_0'] == 1.0; use_dora['default_0'] ID_MATCH 140591004393440; merged_adapters TYPE_MATCH (list, 140591004458752) + LENGTH_CHECK empty; _disable_adapters ID_MATCH 140591004393440; TENSOR_ALIASING of _active_adapter with L['self'].transformer_blocks[0].norm1.linear._active_adapter ...]
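The large integers in these checks are CPython object ids: ID_MATCH passes only if the guarded attribute is the very same object it was at compile time, not merely an equal one. An illustrative re-implementation of the check helpers (assumed semantics, not the actual torch._dynamo guard code):

    # Assumed semantics of the helpers in the guard expressions above
    # (illustrative only, not the actual torch._dynamo implementation).
    def check_obj_id(obj, expected_id: int) -> bool:        # ID_MATCH
        return id(obj) == expected_id                       # object identity, not equality

    def check_type_id(obj, expected_type_id: int) -> bool:  # TYPE_MATCH
        return id(type(obj)) == expected_type_id

    # Booleans are interned singletons, so identity pins their value; that is
    # plausibly why every `.training` flag above guards against one of just two
    # recurring ids (140591004393408 / 140591004393440, presumably id(True) and
    # id(False) in the logging process).
    assert check_obj_id(True, id(True))

Mutable containers cannot be pinned by identity alone (mutation keeps the id), which is why plain dicts such as `scaling` get TYPE_MATCH plus DICT_LENGTH plus a per-key EQUALS_MATCH, and lists such as `merged_adapters` get TYPE_MATCH plus a LENGTH_CHECK.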
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_v, 140533121778496) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- [... to_v carries the same guard subtree as to_q above, against its own objects: lora_A 140533120775216 / lora_A['default_0'] 140533120767296 / .weight 140537313052128; lora_B 140533120772336 / lora_B['default_0'] 140533120765520; base_layer 140581767532352; lora_dropout 140533120776128 / lora_dropout['default_0'] 140533121784352; scaling['default_0'] == 1.0, use_dora['default_0'] ID_MATCH 140591004393440, merged_adapters empty, _disable_adapters 140591004393440, _active_adapter TENSOR_ALIASING ...]
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.norm_k, 140581767532208) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.norm_k.weight, 140581766002016) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.norm_q, 140581767532112) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- [... norm_q mirrors norm_k above: DICT_CONTAINS / training checks, EQUALS_MATCH eps == 1e-06, ID_MATCH weight 140581766002096, and the four empty hook dicts ...]
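The qk-norm modules are much cheaper to guard: just an EQUALS_MATCH on the float `eps` and an ID_MATCH on the `weight` parameter, both falling out of the RMSNorm forward the guards cite (diffusers normalization.py:428-430). A sketch of that forward under the same reading (paraphrased, not the verbatim diffusers source):

    # Paraphrase of the RMSNorm forward that norm_q / norm_k trace through,
    # per the source lines cited in the guards above.
    import torch
    import torch.nn as nn

    class RMSNormSketch(nn.Module):
        def __init__(self, dim: int, eps: float = 1e-6, elementwise_affine: bool = True):
            super().__init__()
            self.eps = eps  # plain float attribute -> EQUALS_MATCH (== 1e-06)
            self.weight = nn.Parameter(torch.ones(dim)) if elementwise_affine else None

        def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
            variance = hidden_states.pow(2).mean(-1, keepdim=True)
            hidden_states = hidden_states * torch.rsqrt(variance + self.eps)  # normalization.py:428
            if self.weight is not None:  # normalization.py:430 -> ID_MATCH on weight
                hidden_states = hidden_states * self.weight
            return hidden_states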
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_out, 140581767532544) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_out[0], 140533121841808) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | | +- [... to_out[0] is the fourth LoRA linear and repeats the to_q subtree above, against its own objects: lora_A 140533121842912 / lora_A['default_0'] 140533121842816 / .weight 140537312873424; lora_B 140533121841280 / lora_B['default_0'] 140533121842576; base_layer 140581767532592; lora_dropout 140533121841376 / lora_dropout['default_0'] 140533121842048; scaling['default_0'] == 1.0, use_dora['default_0'] ID_MATCH 140591004393440, merged_adapters empty, _disable_adapters 140591004393440, _active_adapter TENSOR_ALIASING ...]
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_out[1], 140581767532640) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) #
diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_k_proj, 140533121090512) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].attn.add_k_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_k_proj.training, 140591004393408) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_k_proj.lora_A, 140533121751200) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_k_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | 
+- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_k_proj.lora_A['default_0'], 140533121746544) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_k_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_k_proj.lora_A['default_0'].weight, 140537313062688) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_k_proj.lora_B, 140533121757776) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_k_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_k_proj.lora_B['default_0'], 140533121747984) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_k_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_k_proj.base_layer, 140581767532400) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_k_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_k_proj.lora_dropout, 140533121748224) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.lora_dropout.__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_k_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_k_proj.lora_dropout['default_0'], 140533121745584) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_k_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].attn.add_k_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].attn.add_k_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].attn.add_k_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | 
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].attn.add_k_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].attn.add_k_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_k_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].attn.add_k_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[3].attn.add_k_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_k_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] 
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_q_proj, 140533120129088) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].attn.add_q_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_q_proj.training, 140591004393408) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[3].attn.add_q_proj.lora_A, 140533120128896) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_q_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_q_proj.lora_A['default_0'], 140533121841856) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_q_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_q_proj.lora_A['default_0'].weight, 140537313057728) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_q_proj.lora_B, 140533120132304) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_q_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_q_proj.lora_B['default_0'], 140533121840896) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_q_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_q_proj.base_layer, 140581767532496) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) 
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_q_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_q_proj.lora_dropout, 140533120128032) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_q_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_q_proj.lora_dropout['default_0'], 140533120128128) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_q_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: 
___check_type_id(L['self'].transformer_blocks[3].attn.add_q_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].attn.add_q_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].attn.add_q_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].attn.add_q_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].attn.add_q_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_q_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].attn.add_q_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | 
| | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[3].attn.add_q_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_q_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_v_proj, 140533121753840) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].attn.add_v_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[3].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_v_proj.training, 140591004393408) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_v_proj.lora_A, 140533121753216) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_v_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_v_proj.lora_A['default_0'], 140533120137968) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_v_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_v_proj.lora_A['default_0'].weight, 140537313052208) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_v_proj.lora_B, 140533121755376) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_v_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_v_proj.lora_B['default_0'], 140533120136384) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_v_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # 
peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_v_proj.base_layer, 140581767532448) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_v_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_v_proj.lora_dropout, 140533121756144) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_v_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_v_proj.lora_dropout['default_0'], 140533121747024) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_v_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].attn.add_v_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].attn.add_v_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].attn.add_v_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].attn.add_v_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].attn.add_v_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_v_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].attn.add_v_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[3].attn.add_v_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_v_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
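The subtree above is one repetition of a pattern that recurs for every LoRA-wrapped Linear in this [0/1] recompile (dumps like this are what `TORCH_LOGS="guards"`, or equivalently `torch._logging.set_logs(guards=True)`, prints after compilation): Dynamo pins the identity of `base_layer`, of the `lora_A`/`lora_B`/`lora_dropout` containers and their `'default_0'` entries, and of the `scaling`/`use_dora` dicts and hook dicts. Stitching together the source comments Dynamo attaches to each guard reconstructs roughly the PEFT forward being traced; this is a simplified sketch from those quoted lines, not the verbatim peft/tuners/lora/layer.py source:

```python
import torch

def lora_linear_forward(layer, x: torch.Tensor) -> torch.Tensor:
    # `layer` stands in for the peft LoRA-wrapped Linear guarded above.
    result = layer.base_layer(x)                       # layer.py:557, ID_MATCH on base_layer
    if layer.disable_adapters or layer.merged:         # tuners_utils.py:511 / :506 guards
        return result                                  # (simplification: real code unmerges/merges)
    for active_adapter in layer.active_adapters:       # aliasing guards on _active_adapter
        if active_adapter not in layer.lora_A.keys():  # layer.py:560, ID_MATCH on lora_A
            continue
        lora_A = layer.lora_A[active_adapter]          # layer.py:562
        lora_B = layer.lora_B[active_adapter]          # layer.py:563
        dropout = layer.lora_dropout[active_adapter]   # layer.py:564
        scaling = layer.scaling[active_adapter]        # layer.py:565, EQUALS_MATCH == 1.0
        x = x.to(lora_A.weight.dtype)                  # layer.py:566, ID_MATCH on lora_A weight
        if not layer.use_dora[active_adapter]:         # layer.py:568
            result = result + lora_B(lora_A(dropout(x))) * scaling
    return result
```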
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_add_out, 140533121842960) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].attn.to_add_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_add_out.training, 140591004393408) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_add_out.lora_A, 140533121829136) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_add_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_add_out.lora_A['default_0'], 140533121833600) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_add_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_add_out.lora_A['default_0'].weight, 140537312885904) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
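Most of these guards are ID_MATCH checks: `___check_obj_id` compares the CPython `id()` of the guarded object against the value recorded at compile time. In-place changes to a parameter's values keep the same object and still pass; rebinding the attribute to a new object does not. A minimal illustration of the mechanism, assuming default torch.compile settings (hypothetical `Block` class, not from this trace):

```python
import torch

class Block(torch.nn.Module):
    def __init__(self):
        super().__init__()
        self.proj = torch.nn.Linear(4, 4)

    def forward(self, x):
        return self.proj(x)

block = Block()
compiled = torch.compile(block)
x = torch.randn(2, 4)

compiled(x)                          # compile #1; guards pin id(block.proj), block.proj.training, ...
block.proj.weight.data.mul_(2.0)     # in-place weight change: same object id, ID_MATCH still passes
compiled(x)                          # cached graph reused (with the updated weight values)
block.proj = torch.nn.Linear(4, 4)   # rebinding the submodule changes id() -> ID_MATCH fails
compiled(x)                          # triggers a recompile
```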
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_add_out.lora_B, 140533121831248) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_add_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_add_out.lora_B['default_0'], 140533121833984) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_add_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_add_out.base_layer, 140581767532688) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_add_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_add_out.lora_dropout, 140533121834176) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_add_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_add_out.lora_dropout['default_0'], 140533121841328) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_add_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].attn.to_add_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].attn.to_add_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].attn.to_add_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].attn.to_add_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].attn.to_add_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_add_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].attn.to_add_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[3].attn.to_add_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_add_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
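The EQUALS_MATCH on `scaling['default_0'] == 1.0` is worth noticing: the LoRA scale enters the traced code as a plain Python float, so Dynamo bakes the value into the graph and guards on it. Anything that changes the adapter scale between calls therefore fails this guard and forces a fresh compile. A toy reproduction of the mechanism, independent of peft (exact recompile behavior is version-dependent):

```python
import torch

@torch.compile
def apply_scale(x, scaling: float):
    # `scaling` is a Python float, not a tensor: Dynamo specializes on its
    # value and installs an EQUALS_MATCH guard (scaling == 1.0) for this graph.
    return x * scaling

x = torch.randn(8)
apply_scale(x, 1.0)   # compile #1; guard: scaling == 1.0
apply_scale(x, 1.0)   # guard passes, cached graph reused
apply_scale(x, 0.7)   # EQUALS_MATCH fails -> recompile
```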
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.norm_added_k, 140581767532832) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.norm_added_k.weight, 140581766001856) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.norm_added_q, 140581767532736) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.norm_added_q.weight, 140581766001936) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.processor, 140581767531968) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
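`norm_added_k` and `norm_added_q` are the RMSNorm layers on the added (encoder) projections; the only non-structural guards on them are `eps == 1e-06` and the identity of `weight`. From the quoted lines at diffusers/src/diffusers/models/normalization.py:428-430, the guarded forward has roughly this shape (a simplified sketch, not the exact diffusers source):

```python
import torch

def rms_norm(hidden_states: torch.Tensor, weight, eps: float = 1e-6) -> torch.Tensor:
    input_dtype = hidden_states.dtype
    variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)
    hidden_states = hidden_states * torch.rsqrt(variance + eps)  # guard: eps == 1e-06
    if weight is not None:                                       # guard: ID_MATCH on weight
        hidden_states = hidden_states.to(weight.dtype) * weight
    else:
        hidden_states = hidden_states.to(input_dtype)
    return hidden_states
```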
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1, accessed_by=DictGetItemGuardAccessor(norm1)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1, 140581767531536) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.norm, 140581767531680) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.silu, 140581767531584) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.linear, 140533121334192) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].norm1.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.linear.lora_A, 140533121323152) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.linear.lora_A['default_0'], 140533119879728) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.linear.lora_A['default_0'].weight, 140537311078304) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.linear.lora_B, 140533121334528) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.linear.lora_B['default_0'], 140533119878912) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
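Each wrapped module also carries a DICT_CONTAINS guard asserting that 'forward' is absent from the instance `__dict__`. That guard comes from nn/modules/module.py:1556, where `_call_impl` resolves `self.forward`: a per-instance override of `forward` would land in the instance dict and change dispatch, so Dynamo has to rule it out. Concretely:

```python
import torch

lin = torch.nn.Linear(4, 4)
print('forward' in lin.__dict__)   # False: forward resolves on the class

# A per-instance monkey-patch lands in the instance dict and changes dispatch;
# a graph guarded with `not ___dict_contains('forward', ...)` would no longer match.
lin.forward = lambda x: x
print('forward' in lin.__dict__)   # True
```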
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.linear.base_layer, 140581767531632) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.linear.lora_dropout, 140533121332608) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.linear.lora_dropout['default_0'], 140533121332272) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].norm1.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].norm1.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].norm1.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].norm1.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].norm1.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].norm1.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[3].norm1.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
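`norm1` is the block's AdaLayerNormZero, whose modulation linear is itself LoRA-wrapped (hence the `lora_A`/`lora_B` guards just above). Following the lines quoted from normalization.py:135-139 and the tuple unpacked at transformer_flux.py:165, the guarded path is approximately the following (a sketch; the chunk into six modulation tensors matches the names returned in the trace, and all module arguments stand in for the guarded submodules):

```python
import torch

def ada_layer_norm_zero(x, emb, silu, linear, norm):
    emb = linear(silu(emb))  # normalization.py:137; `linear` is the LoRA-wrapped nn.Linear
    shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = emb.chunk(6, dim=1)
    x = norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # normalization.py:139
    return x, gate_msa, shift_mlp, scale_mlp, gate_mlp
```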
+- GuardManager: source=L['self'].transformer_blocks[3].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm2, accessed_by=DictGetItemGuardAccessor(norm2) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm2, 140581767532880) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm2.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context, 140581767533216) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # 
diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net, 140581767533360) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[3].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0], 140581767533312) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 
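Each record above and below is one node of the TREE_GUARD_MANAGER that Dynamo attaches to this compiled forward: every submodule the trace touched gets an ID_MATCH (___check_obj_id) on the module object and on its training flag, a DICT_CONTAINS check that no instance-level forward has been monkey-patched onto it, and GuardManager nodes for its hook dicts. If any check fails on a later call, cache entry [0/1] is rejected and the function recompiles. A minimal sketch of that mechanism on a toy module (an illustrative stand-in, not the Flux transformer; the compile and logging calls are standard torch APIs):

    import torch

    # Toy stand-in for the guarded modules above: forward() branches on
    # self.training, so Dynamo installs an ID_MATCH guard (___check_obj_id)
    # on the attribute, exactly as in the records of this log.
    class Toy(torch.nn.Module):
        def __init__(self):
            super().__init__()
            self.linear = torch.nn.Linear(4, 4)

        def forward(self, x):
            y = self.linear(x)
            return y if self.training else 0.5 * y

    torch._logging.set_logs(guards=True, recompiles=True)  # same artifacts as this dump
    model = Toy()
    compiled = torch.compile(model)
    x = torch.randn(2, 4)
    compiled(x)   # compile #1: guard tree printed, ID_MATCH on L['self'].training
    model.eval()  # mutates the guarded flag ...
    compiled(x)   # ... ID_MATCH fails, a new cache entry like [0/1] is compiled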
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].proj, 140533120640592) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].ff_context.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_A, 140533120644192) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_A['default_0'], 140533120638336) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_A['default_0'].weight, 140537312874624) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_B, 140533120631232) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_B['default_0'], 140533120631808) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].proj.base_layer, 140581767533408) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_dropout, 140533120636704) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_dropout['default_0'], 140533120640688) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].ff_context.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].ff_context.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].ff_context.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].ff_context.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].ff_context.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].ff_context.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[3].ff_context.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
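Taken together, the block above is the complete guard surface of one peft LoRA-wrapped Linear (ff_context.net[0].proj): identity checks on the lora_A/lora_B ModuleDicts, their 'default_0' entries and weights, boolean checks on use_dora, merged_adapters and _disable_adapters, and an EQUALS_MATCH pinning scaling['default_0'] to the float 1.0. That float is burned into the compiled graph, so changing the adapter scale at runtime fails the guard and forces a recompile. A hedged toy mirror of peft's LoraLayer.scaling dict (illustrative only, not peft itself):

    import torch

    # Toy mirror of peft's per-adapter scaling dict: the float read in
    # forward() is specialized into the graph and protected by the same
    # EQUALS_MATCH ... == 1.0 guard shown in the records above.
    class ScaledLinear(torch.nn.Module):
        def __init__(self):
            super().__init__()
            self.base = torch.nn.Linear(4, 4)
            self.scaling = {"default_0": 1.0}

        def forward(self, x):
            return self.base(x) * self.scaling["default_0"]

    model = ScaledLinear()
    compiled = torch.compile(model)
    x = torch.randn(2, 4)
    compiled(x)                        # compiles, specialized on scaling == 1.0
    model.scaling["default_0"] = 0.5   # e.g. the effect of a lora_scale change
    compiled(x)                        # EQUALS_MATCH fails -> recompile at 0.5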
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[1], 140581767533504) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[2], 140533120637712) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].ff_context.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[2].lora_A, 140533120642320) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[2].lora_A['default_0'], 140533120181360) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[2].lora_A['default_0'].weight, 140537312870704) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[2].lora_B, 140533120632624) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[2].lora_B['default_0'], 140533120180928) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[2].base_layer, 140581767533552) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[2].lora_dropout, 140533120634064) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[2].lora_dropout['default_0'], 140533120633536) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].ff_context.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].ff_context.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].ff_context.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].ff_context.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].ff_context.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
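For reference, a dump like this one comes from the "guards" logging artifact; the companion "recompiles" artifact prints which specific guard failed whenever a new cache entry such as [0/1] is created. The standard knobs (public torch._dynamo / torch._logging APIs; the cache limit value below is only an example, the default is intentionally small):

    import torch
    import torch._dynamo as dynamo

    # Equivalent env-var form: TORCH_LOGS="guards,recompiles" python script.py
    # Each set_logs call replaces the previous artifact selection, so pass
    # all desired artifacts in one call.
    torch._logging.set_logs(guards=True, recompiles=True)

    # Each guard set is one cache entry per compiled code object
    # ([0/0], [0/1], ... in this log). If legitimately distinct variants keep
    # recompiling, the per-code cache limit can be raised; dynamo.reset()
    # clears all cached entries and guard state.
    dynamo.config.cache_size_limit = 16
    dynamo.reset()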
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].ff_context.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[3].ff_context.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context, 140581767531728) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.norm, 140581767531920) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.silu, 140581767531824) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.linear, 140533119887552) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].norm1_context.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.linear.lora_A, 140533119882800) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.linear.lora_A['default_0'], 140533119893360) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.linear.lora_A['default_0'].weight, 140537311078064) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.linear.lora_B, 140533119891872) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.linear.lora_B['default_0'], 140533119882704) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.linear.base_layer, 140581767531872) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.linear.lora_dropout, 140533119887984) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.linear.lora_dropout['default_0'], 140533119883616) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].norm1_context.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].norm1_context.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].norm1_context.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].norm1_context.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].norm1_context.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
[__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].norm1_context.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[3].norm1_context.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].transformer_blocks[3].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm2_context, 140581767532928) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[4], accessed_by=GetItemGuardAccessor(4) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4], 140581767531200) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff, accessed_by=DictGetItemGuardAccessor(ff) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff, 140581767534848) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net, 140581767535088) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[4].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0], 140581767535040) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0]._modules, 
accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].proj, 140533120184096) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].ff.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].proj.lora_A, 140533120525712) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[4].ff.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].proj.lora_A['default_0'], 140533120523216) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].proj.lora_A['default_0'].weight, 140537312473024) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].proj.lora_B, 140533120526672) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # 
peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].proj.lora_B['default_0'], 140533120520960) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].proj.base_layer, 140581767535136) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].proj.lora_dropout, 140533120525376) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].proj.lora_dropout['default_0'], 140533120525760) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].ff.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].ff.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- 
EQUALS_MATCH: L['self'].transformer_blocks[4].ff.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].ff.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].ff.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].ff.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[4].ff.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in 
disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[1], 140581767535184) # for module in self.net: # 
diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[2], 140533120521920) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].ff.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[2].lora_A, 140533120522256) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | 
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[2].lora_A['default_0'], 140533120892832) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[2].lora_A['default_0'].weight, 140537312471744) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[2].lora_B, 140533120521824) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[2].lora_B['default_0'], 140533120892208) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[2].base_layer, 140581767535232) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards]
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[2].lora_dropout, 140533120529312) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[2].lora_dropout['default_0'], 140533120521968) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].ff.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].ff.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].ff.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].ff.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].ff.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].ff.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[4].ff.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
[Guard-free GuardManager accessor entries (_forward_hooks / _backward_hooks / _forward_pre_hooks / _backward_pre_hooks on ff.net[2] and on ff, plus __dict__ accessors and the per-module .training ID_MATCH guards) are elided here and below; the TENSOR_ALIASING entry was logged twice verbatim and is deduplicated.]
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn, 140581767534080) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
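The guard comments above and below quote the peft LoRA forward line by line (peft/tuners/lora/layer.py:557-568): each ID_MATCH pins one of the objects that forward touches, and the TYPE_MATCH / DICT_LENGTH / EQUALS_MATCH triples pin the scaling dict. A minimal, hypothetical sketch of that control flow follows (a reconstruction from the quoted lines, not the verbatim peft source; the class and constructor are assumptions):

    import torch
    import torch.nn as nn

    class LoraLinearSketch(nn.Module):
        """Sketch of a peft-style LoRA-wrapped Linear, per the quoted layer.py lines."""

        def __init__(self, base_layer: nn.Linear, r: int, lora_alpha: int, adapter: str = "default_0"):
            super().__init__()
            self.base_layer = base_layer
            self.lora_A = nn.ModuleDict({adapter: nn.Linear(base_layer.in_features, r, bias=False)})
            self.lora_B = nn.ModuleDict({adapter: nn.Linear(r, base_layer.out_features, bias=False)})
            self.lora_dropout = nn.ModuleDict({adapter: nn.Identity()})
            self.scaling = {adapter: lora_alpha / r}   # dict -> the TYPE_MATCH/DICT_LENGTH guards above
            self.use_dora = {adapter: False}           # False -> the plain-LoRA branch gets traced
            self.merged_adapters = []                  # empty -> LENGTH_CHECK "not merged_adapters"
            self._disable_adapters = False
            self._active_adapter = [adapter]           # shared list -> the TENSOR_ALIASING guards

        def forward(self, x, *args, **kwargs):
            result = self.base_layer(x, *args, **kwargs)        # layer.py:557
            if self._disable_adapters or self.merged_adapters:  # tuners_utils.py:506/511
                return result
            for active_adapter in self._active_adapter:         # tuners_utils.py:516
                if active_adapter not in self.lora_A.keys():    # layer.py:560
                    continue
                lora_A = self.lora_A[active_adapter]            # layer.py:562
                lora_B = self.lora_B[active_adapter]            # layer.py:563
                dropout = self.lora_dropout[active_adapter]     # layer.py:564
                scaling = self.scaling[active_adapter]          # layer.py:565
                x = x.to(lora_A.weight.dtype)                   # layer.py:566
                if not self.use_dora[active_adapter]:           # layer.py:568
                    result = result + lora_B(lora_A(dropout(x))) * scaling
            return result

    layer = LoraLinearSketch(nn.Linear(64, 64), r=16, lora_alpha=16)
    out = layer(torch.randn(2, 64))  # base output plus the scaled low-rank update

Because every object this forward reads is plain Python state (dicts, lists, bools, module attributes), Dynamo must guard on each one, which is why the identical subtree repeats for every adapted projection below.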
| | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_k, 140533120796896) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_k.lora_A, 140533120800640) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_k.lora_A['default_0'], 140533120801264) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_k.lora_A['default_0'].weight, 140537312735648) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_k.lora_B, 140533120809088) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_k.lora_B['default_0'], 140533120801504) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_k.base_layer, 140581767534224) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_k.lora_dropout, 140533120800688) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_k.lora_dropout['default_0'], 140533120796800) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[4].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
[attn.to_q (# query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__) and attn.to_v (# value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__) carry the identical guard subtree, differing only in object ids; their guard-free accessor and .training entries are likewise elided.]
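The EQUALS_MATCH on scaling['default_0'] == 1.0 recurs for every adapted projection above. In peft, a LoRA layer's scaling is derived from its config (lora_alpha / r, or lora_alpha / sqrt(r) with use_rslora), so an exact 1.0 is what equal rank and alpha would produce. A hedged illustration; the concrete numbers are assumptions, not read from this log:

    from peft import LoraConfig

    # e.g. r == lora_alpha gives scaling = lora_alpha / r = 1.0, matching the guard
    config = LoraConfig(r=16, lora_alpha=16, target_modules=["to_q", "to_k", "to_v"])
    print(config.lora_alpha / config.r)  # 1.0

Because the value is guarded with EQUALS_MATCH rather than traced symbolically, loading an adapter with a different effective scale would fail this guard and trigger a recompile.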
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.norm_k, 140581767534272) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.norm_k.weight, 140581766004096) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
[attn.norm_q carries the identical subtree (module ID_MATCH 140581767534176, weight ID_MATCH 140581783273312, EQUALS_MATCH eps == 1e-06, guards quoting diffusers/src/diffusers/models/attention_processor.py:1727); guard-free hook accessors on both norm modules are elided.]
source=L['self'].transformer_blocks[4].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.norm_q.weight, 140581783273312) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out, 
accessed_by=DictGetItemGuardAccessor(to_out) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out, 140581767534608) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[0], 140533120244160) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].attn.to_out[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[0].training, 140591004393408) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[0].lora_A, 
140533120241664) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[0].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[0].lora_A['default_0'], 140533120199568) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[0].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[0].lora_A['default_0'].weight, 140537312631664) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | 
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[0].lora_B, 140533120241760) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[0].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[0].lora_B['default_0'], 140533120199424) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[0].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[0].base_layer, 140581767534656) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[0].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[0].lora_dropout, 140533120241040) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[0].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[0].lora_dropout['default_0'], 140533120241472) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[0].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: 
___check_type_id(L['self'].transformer_blocks[4].attn.to_out[0].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].attn.to_out[0].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].attn.to_out[0].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.to_out[0].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].attn.to_out[0].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[0].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.to_out[0].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[4].attn.to_out[0].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[0]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[1], accessed_by=GetItemGuardAccessor(1) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[1], 140581767534704) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # 
diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_k_proj, 140533120690368) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].attn.add_k_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_k_proj.training, 140591004393408) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_k_proj.lora_A, 140533120682208) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_k_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | 
+- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_k_proj.lora_A['default_0'], 140533120250832) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_k_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_k_proj.lora_A['default_0'].weight, 140537312633104) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_k_proj.lora_B, 140533120243776) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_k_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_k_proj.lora_B['default_0'], 140533120245024) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_k_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_k_proj.base_layer, 140581767534464) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_k_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_k_proj.lora_dropout, 140533120695600) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.lora_dropout.__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_k_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_k_proj.lora_dropout['default_0'], 140533120686672) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_k_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.add_k_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].attn.add_k_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].attn.add_k_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | 
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.add_k_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].attn.add_k_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_k_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.add_k_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[4].attn.add_k_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_k_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] 
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_q_proj, 140533120250736) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].attn.add_q_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_q_proj.training, 140591004393408) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[4].attn.add_q_proj.lora_A, 140533120243488) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_q_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_q_proj.lora_A['default_0'], 140533120240080) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_q_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_q_proj.lora_A['default_0'].weight, 140537312628304) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_q_proj.lora_B, 140533120250688) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_q_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_q_proj.lora_B['default_0'], 140533120242096) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_q_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_q_proj.base_layer, 140581767534560) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) 
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_q_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_q_proj.lora_dropout, 140533120239216) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_q_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_q_proj.lora_dropout['default_0'], 140533120245456) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_q_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: 
___check_type_id(L['self'].transformer_blocks[4].attn.add_q_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].attn.add_q_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].attn.add_q_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.add_q_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].attn.add_q_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_q_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.add_q_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | 
| | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[4].attn.add_q_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_q_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_v_proj, 140533120251744) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].attn.add_v_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[4].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_v_proj.training, 140591004393408) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_v_proj.lora_A, 140533120245312) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_v_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_v_proj.lora_A['default_0'], 140533120244208) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_v_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_v_proj.lora_A['default_0'].weight, 140537312631344) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_v_proj.lora_B, 140533120239456) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_v_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_v_proj.lora_B['default_0'], 140533120243872) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_v_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # 
peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_v_proj.base_layer, 140581767534512) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_v_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_v_proj.lora_dropout, 140533120244592) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_v_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_v_proj.lora_dropout['default_0'], 140533120243200) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[4].attn.add_v_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_v_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.add_v_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].attn.add_v_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].attn.add_v_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.add_v_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].attn.add_v_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_v_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.add_v_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[4].attn.add_v_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_v_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out, 
accessed_by=DictGetItemGuardAccessor(to_add_out) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_add_out, 140533120198560) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].attn.to_add_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_add_out.training, 140591004393408) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_add_out.lora_A, 140533120194960) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_add_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[4].attn.to_add_out.lora_A['default_0'], 140533120197696) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_add_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_add_out.lora_A['default_0'].weight, 140537312629984) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_add_out.lora_B, 140533120198704) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_add_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_add_out.lora_B['default_0'], 140533120197024) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_add_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_add_out.base_layer, 140581767534752) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_add_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_add_out.lora_dropout, 140533120201488) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.lora_dropout.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_add_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_add_out.lora_dropout['default_0'], 140533120200672) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_add_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.to_add_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].attn.to_add_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].attn.to_add_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | 
| +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.to_add_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].attn.to_add_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_add_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.to_add_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[4].attn.to_add_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_add_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.norm_added_k, 140581767534896) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[4].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.norm_added_k.weight, 140581766003936) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.norm_added_q, 140581767534800) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: 
L['self'].transformer_blocks[4].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.norm_added_q.weight, 140581766004016) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.processor, 140581767534032) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[4].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1, accessed_by=DictGetItemGuardAccessor(norm1) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1, 140581767533600) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.norm, 140581767533744) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.silu, 140581767533648) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.linear, 140533120185968) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].norm1.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.linear.lora_A, 140533120175120) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.linear.lora_A['default_0'], 140533120175888) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.linear.lora_A['default_0'].weight, 140537312734208) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.linear.lora_B, 140533120181744) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.linear.lora_B['default_0'], 140533120179872) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.linear.base_layer, 140581767533696) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.linear.lora_dropout, 140533120184960) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.linear.lora_dropout['default_0'], 140533120184288) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.lora_dropout['default_0'].__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].norm1.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].norm1.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].norm1.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].norm1.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].norm1.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[4].norm1.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].norm1.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[4].norm1.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
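
For orientation: every ID_MATCH, TYPE_MATCH, DICT_LENGTH, EQUALS_MATCH and LENGTH_CHECK in the norm1.linear subtree above maps one-to-one onto an attribute access in peft's LoRA Linear.forward, per the source line quoted after each guard. Below is a minimal sketch of that hot path assembled only from those quoted lines; the class name, the rank r, and the final residual update are illustrative assumptions, not taken from this log.

import torch
from torch import nn

class LoraLinearSketch(nn.Module):
    # Hypothetical stand-in for the peft LoRA Linear wrapper, holding exactly
    # the attributes that the guards above pin down.
    def __init__(self, base: nn.Linear, r: int = 16):
        super().__init__()
        self.base_layer = base
        self.lora_A = nn.ModuleDict({"default_0": nn.Linear(base.in_features, r, bias=False)})
        self.lora_B = nn.ModuleDict({"default_0": nn.Linear(r, base.out_features, bias=False)})
        self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})
        self.scaling = {"default_0": 1.0}     # guarded by DICT_LENGTH == 1 and EQUALS_MATCH == 1.0
        self.use_dora = {"default_0": False}  # guarded by ID_MATCH on the False singleton
        self.active_adapters = ["default_0"]

    def forward(self, x):
        result = self.base_layer(x)                       # layer.py:557
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():  # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]          # layer.py:562
            lora_B = self.lora_B[active_adapter]          # layer.py:563
            dropout = self.lora_dropout[active_adapter]   # layer.py:564
            scaling = self.scaling[active_adapter]        # layer.py:565
            x = x.to(lora_A.weight.dtype)                 # layer.py:566
            if not self.use_dora[active_adapter]:         # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling  # assumed update, not quoted in the log
        return result

out = LoraLinearSketch(nn.Linear(8, 8))(torch.randn(2, 8))  # usage: same feature width in and out

V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | 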
+- GuardManager: source=L['self'].transformer_blocks[4].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm2, accessed_by=DictGetItemGuardAccessor(norm2) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm2, 140581767534944) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm2.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context, 140581767535280) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # 
diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net, 140581767535424) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[4].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0], 140581767535376) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].proj, 140533120884912) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].ff_context.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_A, 140533120884864) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_A['default_0'], 140533120891920) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_A['default_0'].weight, 140537312466544) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_B, 140533120884096) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_B['default_0'], 140533120891632) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].proj.base_layer, 140581767535472) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_dropout, 140533120884960) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_dropout['default_0'], 140533120883568) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | 
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].ff_context.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].ff_context.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].ff_context.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].ff_context.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].ff_context.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: 
___check_type_id(L['self'].transformer_blocks[4].ff_context.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[4].ff_context.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
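
The same handful of guard kinds recurs throughout this dump. A rough Python paraphrase of what they assert about ff_context.net[0].proj follows; this is a sketch, the real checks are compiled helpers installed by torch/_dynamo/guards.py, and `expected` is a hypothetical snapshot taken at compile time. Note that ___check_obj_id can pin .training and use_dora values because True and False are singletons, so comparing id() fixes the boolean itself.

def guards_still_hold(proj, expected):
    # Hypothetical restatement of the guards above; any False here would
    # invalidate this compiled frame ([0/1]) and trigger recompilation.
    return (
        id(proj) == expected["proj_id"]                   # ID_MATCH: the exact module object
        and id(proj.training) == expected["training_id"]  # ID_MATCH on the True/False singleton
        and type(proj.scaling) is dict                    # TYPE_MATCH
        and len(proj.scaling) == 1                        # DICT_LENGTH: exactly one adapter
        and proj.scaling["default_0"] == 1.0              # EQUALS_MATCH: LoRA scale unchanged
        and not proj.merged_adapters                      # LENGTH_CHECK: adapters not merged
        and proj._disable_adapters is False               # ID_MATCH on the False singleton
    )

Read this way, the dump also states when recompilation will occur: loading a second adapter, changing the LoRA scale away from 1.0, merging the adapters into the base weights, or flipping a module between train and eval mode each falsifies one of these guards.

V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- 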
GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[1], 140581767535568) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[2], 140533120886496) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].ff_context.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[2].training, 140591004393408) # for 
module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[2].lora_A, 140533120886832) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[2].lora_A['default_0'], 140533120879920) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[2].lora_A['default_0'].weight, 140537312473504) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[2].lora_B, 140533120883664) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[2].lora_B['default_0'], 140533120892064) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[2].base_layer, 140581767535616) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[2].lora_dropout, 140533120887168) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[2].lora_dropout['default_0'], 140533120887120) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[4].ff_context.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].ff_context.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].ff_context.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].ff_context.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].ff_context.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].ff_context.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].ff_context.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[4].ff_context.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[4].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context, 140581767533792) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[4].norm1_context.norm, 140581767533984) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.silu, 140581767533888) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.linear, 140533120767152) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].norm1_context.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.linear.lora_A, 140533120806112) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.linear.lora_A['default_0'], 140533120810864) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.linear.lora_A['default_0'].training, 140591004393408) # lora_A = 
self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.linear.lora_A['default_0'].weight, 140537312726688) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.linear.lora_B, 140533120795888) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.linear.lora_B['default_0'], 140533120810768) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.linear.base_layer, 140581767533936) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.linear.lora_dropout, 140533120805296) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.linear.lora_dropout['default_0'], 140533120799056) # 
dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].norm1_context.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].norm1_context.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].norm1_context.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].norm1_context.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].norm1_context.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].norm1_context.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[4].norm1_context.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].transformer_blocks[4].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm2_context, 140581767534992) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[5], accessed_by=GetItemGuardAccessor(5) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5], 140581767533264) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff, accessed_by=DictGetItemGuardAccessor(ff) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff, 140581767536912) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net, 140581767537152) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[5].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0], 140581767537104) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0]._modules, 
accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].proj, 140533121775648) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].ff.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].proj.lora_A, 140533121762832) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[5].ff.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].proj.lora_A['default_0'], 140533121770704) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].proj.lora_A['default_0'].weight, 140537312140704) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].proj.lora_B, 140533121766576) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # 
peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].proj.lora_B['default_0'], 140533121769936) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].proj.base_layer, 140581767537200) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].proj.lora_dropout, 140533121763888) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].proj.lora_dropout['default_0'], 140533121773056) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].ff.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].ff.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].ff.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].ff.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].ff.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].ff.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[5].ff.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
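The guard comments above map one-to-one onto the LoRA forward pass in peft/tuners/lora/layer.py: every attribute the traced code reads (lora_A, lora_B, lora_dropout, scaling, use_dora, plus the merged/disable/active-adapter properties from tuners_utils.py) gets its own guard node. A paraphrased sketch of that forward, reconstructed only from the source lines quoted in the guards rather than copied verbatim from PEFT:

    # Sketch of peft/tuners/lora/layer.py:557-568 (Linear.forward), as reflected in the guards.
    def lora_linear_forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)        # layer.py:557
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():    # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]            # layer.py:562
            lora_B = self.lora_B[active_adapter]            # layer.py:563
            dropout = self.lora_dropout[active_adapter]     # layer.py:564
            scaling = self.scaling[active_adapter]          # layer.py:565
            x = x.to(lora_A.weight.dtype)                   # layer.py:566
            if not self.use_dora[active_adapter]:           # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result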
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[1], 140581767537248) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[2], 140533121774928) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].ff.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[2].lora_A, 140533121762688) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[2].lora_A['default_0'], 140533118863824) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[2].lora_A['default_0'].weight, 140537312142544) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[2].lora_B, 140533118871408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[2].lora_B['default_0'], 140533118870736) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[2].base_layer, 140581767537296) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[2].lora_dropout, 140533121773776) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[2].lora_dropout['default_0'], 140533121764608) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].ff.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].ff.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].ff.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].ff.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].ff.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].ff.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[5].ff.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
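A TREE_GUARD_MANAGER dump like the one above is printed once per compiled frame when guard logging is enabled. Assuming a recent PyTorch 2.x, either the TORCH_LOGS environment variable or torch._logging should reproduce it:

    # Shell: TORCH_LOGS="guards,recompiles" python repro.py
    import torch
    torch._logging.set_logs(guards=True, recompiles=True)
    # guards=True prints the guard tree for each compiled frame;
    # recompiles=True logs which guard failed whenever a frame recompiles.

Note how many of the checks are ID_MATCH (___check_obj_id): each submodule, each lora_A/lora_B weight, and each training flag is pinned by object identity, so replacing any module in place (for example by reloading an adapter) invalidates this cache entry even if the new objects are numerically identical.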
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn, 140581767536144) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_k, 140533120886016) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_k.lora_A, 140533120378496) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_k.lora_A['default_0'], 140533120379072) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_k.lora_A['default_0'].weight, 140537312291440) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_k.lora_B, 140533120373600) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_k.lora_B['default_0'], 140533120380560) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_k.base_layer, 140581767536288) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_k.lora_dropout, 140533120881696) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_k.lora_dropout['default_0'], 140533120878624) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[5].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
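The scaling entries are guarded with EQUALS_MATCH against the concrete Python float (scaling['default_0'] == 1.0), so the compiled graph specializes on the LoRA scale: mutating it after compilation fails the guard and forces a recompile. A minimal illustration of the same mechanism, with a toy module standing in for the PEFT layer (hypothetical names, not the PEFT API):

    import torch

    class Toy(torch.nn.Module):
        def __init__(self):
            super().__init__()
            self.lin = torch.nn.Linear(8, 8)
            self.scaling = {"default_0": 1.0}  # guarded like peft's scaling dict

        def forward(self, x):
            return self.lin(x) * self.scaling["default_0"]

    m = Toy()
    fn = torch.compile(m)
    x = torch.randn(2, 8)
    fn(x)                          # first call: compiles, installs EQUALS_MATCH on 1.0
    m.scaling["default_0"] = 0.5   # the guard now fails ...
    fn(x)                          # ... so this call recompiles (run with TORCH_LOGS="recompiles" to see it)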
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_q, 140533120883712) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_q.lora_A, 140533120882992) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_q.lora_A['default_0'], 140533120885296) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_q.lora_A['default_0'].weight, 140537312286480) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_q.lora_B, 140533120876608) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_q.lora_B['default_0'], 140533120886352) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_q.base_layer, 140581767536384) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_q.lora_dropout, 140533120883232) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_q.lora_dropout['default_0'], 140533120883424) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[5].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_v, 140533120380368) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_v.lora_A, 140533120380896) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_v.lora_A['default_0'], 140533120378880) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | 
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_v.lora_A['default_0'].weight, 140537312292480) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_v.lora_B, 140533120378784) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_v.lora_B['default_0'], 140533120373360) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_v.base_layer, 140581767536480) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_v.lora_dropout, 140533120380416) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_v.lora_dropout['default_0'], 140533120379792) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- 
TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[5].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.norm_k, 140581767536336) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 
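Every guard in the to_q/to_v blocks above points at the same handful of source lines in PEFT's LoRA Linear.forward. The sketch below reconstructs that control flow from the lines quoted in the guard comments (peft/tuners/lora/layer.py:557-568); it is a paraphrase for orientation, not the verbatim PEFT implementation. The class name is hypothetical, and the body of the non-DoRA branch (the standard update result + lora_B(lora_A(dropout(x))) * scaling) is an assumption, since the log quotes the branch condition but not the branch body.

from torch import nn

class LoraLinearSketch(nn.Module):
    """Hypothetical minimal stand-in for the guarded PEFT LoRA Linear layer."""

    def __init__(self, base_layer, r=16):
        super().__init__()
        self.base_layer = base_layer
        self.lora_A = nn.ModuleDict({"default_0": nn.Linear(base_layer.in_features, r, bias=False)})
        self.lora_B = nn.ModuleDict({"default_0": nn.Linear(r, base_layer.out_features, bias=False)})
        self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})
        self.scaling = {"default_0": 1.0}     # guarded: TYPE_MATCH dict, DICT_LENGTH == 1, EQUALS_MATCH == 1.0
        self.use_dora = {"default_0": False}  # guarded: ID_MATCH against the False singleton
        self._active_adapter = ["default_0"]  # guarded: TENSOR_ALIASING, the same list object shared by every layer

    def forward(self, x):
        result = self.base_layer(x)                       # layer.py:557
        for active_adapter in self._active_adapter:       # tuners_utils.py:516 in active_adapter
            if active_adapter not in self.lora_A.keys():  # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]          # layer.py:562
            lora_B = self.lora_B[active_adapter]          # layer.py:563
            dropout = self.lora_dropout[active_adapter]   # layer.py:564
            scaling = self.scaling[active_adapter]        # layer.py:565
            x = x.to(lora_A.weight.dtype)                 # layer.py:566
            if not self.use_dora[active_adapter]:         # layer.py:568
                # assumed standard LoRA update; the compiled graph specializes on this branch
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Because scaling['default_0'] is pinned with EQUALS_MATCH == 1.0 rather than treated as a dynamic value, and use_dora / merged_adapters / _disable_adapters are pinned by ID_MATCH and LENGTH_CHECK, changing the LoRA scale, enabling DoRA, merging or disabling adapters, or adding a second adapter invalidates these guards and forces a recompile.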
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.norm_k, 140581767536336) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.norm_k.weight, 140581765824816) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.norm_q, 140581767536240) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.norm_q.weight, 140581772716176) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
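The norm_q/norm_k guards cover diffusers' RMSNorm: EQUALS_MATCH pins eps to 1e-06 and ID_MATCH pins the identity of the optional weight parameter. Below is a minimal self-contained sketch of that forward, based only on the two lines quoted in the guard comments (diffusers/src/diffusers/models/normalization.py:428 and 430); the class name is hypothetical, the variance computation is assumed to be the usual mean-of-squares RMS statistic, and dtype handling is omitted.

import torch
from torch import nn

class RMSNormSketch(nn.Module):
    """Hypothetical minimal stand-in for the guarded RMSNorm."""

    def __init__(self, dim, eps=1e-6, elementwise_affine=True):
        super().__init__()
        self.eps = eps  # guarded: EQUALS_MATCH eps == 1e-06
        self.weight = nn.Parameter(torch.ones(dim)) if elementwise_affine else None

    def forward(self, hidden_states):
        # assumed: mean of squares over the last dimension
        variance = hidden_states.pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + self.eps)  # normalization.py:428
        if self.weight is not None:  # normalization.py:430 -> ID_MATCH on self.weight
            hidden_states = hidden_states * self.weight
        return hidden_states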
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out, 140581767536672) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0], accessed_by=GetItemGuardAccessor(0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[0], 140533119391616) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].attn.to_out[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[0].training, 140591004393408) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[0].lora_A, 140533118886928) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[0].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[0].lora_A['default_0'], 140533118890096) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[0].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[0].lora_A['default_0'].weight, 140537312147584) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[0].lora_B, 140533118885968) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[0].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[0].lora_B['default_0'], 140533118893120) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[0].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[0].base_layer, 140581767536720) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[0].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[0].lora_dropout, 140533118883712) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[0].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[0].lora_dropout['default_0'], 140533119393584) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[0].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.to_out[0].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].attn.to_out[0].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].attn.to_out[0].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.to_out[0].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].attn.to_out[0].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[0].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.to_out[0].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[5].attn.to_out[0].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[0]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[1], accessed_by=GetItemGuardAccessor(1)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[1], 140581767536768) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_k_proj, 140533120384496) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].attn.add_k_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_k_proj.training, 140591004393408) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_k_proj.lora_A, 140533120382288) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_k_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_k_proj.lora_A['default_0'], 140533120376192) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_k_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_k_proj.lora_A['default_0'].weight, 140537312290800) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_k_proj.lora_B, 140533120381568) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_k_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_k_proj.lora_B['default_0'], 140533120368896) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_k_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_k_proj.base_layer, 140581767536528) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_k_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_k_proj.lora_dropout, 140533120375904) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_k_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_k_proj.lora_dropout['default_0'], 140533120383872) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_k_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.add_k_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].attn.add_k_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].attn.add_k_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | |
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.add_k_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].attn.add_k_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_k_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.add_k_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[5].attn.add_k_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_k_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] 
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_q_proj, 140533120481264) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].attn.add_q_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_q_proj.training, 140591004393408) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[5].attn.add_q_proj.lora_A, 140533120476272) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_q_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_q_proj.lora_A['default_0'], 140533119387632) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_q_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_q_proj.lora_A['default_0'].weight, 140537312281680) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_q_proj.lora_B, 140533120480832) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_q_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_q_proj.lora_B['default_0'], 140533119400496) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_q_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_q_proj.base_layer, 140581767536624) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) 
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_q_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_q_proj.lora_dropout, 140533120481312) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_q_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_q_proj.lora_dropout['default_0'], 140533120478192) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_q_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: 
___check_type_id(L['self'].transformer_blocks[5].attn.add_q_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].attn.add_q_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].attn.add_q_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.add_q_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].attn.add_q_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_q_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.add_q_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | 
| | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[5].attn.add_q_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_q_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_v_proj, 140533120381232) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].attn.add_v_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[5].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_v_proj.training, 140591004393408) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_v_proj.lora_A, 140533120469264) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_v_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_v_proj.lora_A['default_0'], 140533120481456) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_v_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_v_proj.lora_A['default_0'].weight, 140537312285120) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_v_proj.lora_B, 140533120477472) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_v_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_v_proj.lora_B['default_0'], 140533120469360) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_v_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # 
peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_v_proj.base_layer, 140581767536576) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_v_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_v_proj.lora_dropout, 140533120382096) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_v_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_v_proj.lora_dropout['default_0'], 140533120376816) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[5].attn.add_v_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_v_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.add_v_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].attn.add_v_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].attn.add_v_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.add_v_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].attn.add_v_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_v_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.add_v_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[5].attn.add_v_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_v_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out, 
accessed_by=DictGetItemGuardAccessor(to_add_out) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_add_out, 140533118892160) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].attn.to_add_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_add_out.training, 140591004393408) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_add_out.lora_A, 140533118892928) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_add_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[5].attn.to_add_out.lora_A['default_0'], 140533121771472) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_add_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_add_out.lora_A['default_0'].weight, 140537312139584) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_add_out.lora_B, 140533118880880) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_add_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_add_out.lora_B['default_0'], 140533121762976) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_add_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_add_out.base_layer, 140581767536816) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_add_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_add_out.lora_dropout, 140533118887840) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.lora_dropout.training, 
accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_add_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_add_out.lora_dropout['default_0'], 140533118879824) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_add_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.to_add_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].attn.to_add_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].attn.to_add_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.to_add_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].attn.to_add_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_add_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.to_add_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[5].attn.to_add_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_add_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
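
The block of guards above pins down every Python-level decision in the PEFT LoRA linear forward for the single adapter 'default_0': the dropout module's identity, scaling == 1.0, use_dora False, no merged adapters, adapters enabled. A minimal sketch of the branch that gets traced under those guards (a paraphrase of peft/tuners/lora/layer.py:557-568 as cited in the log, not the exact source):

    # Hedged sketch: the single-adapter, unmerged, non-DoRA path these guards freeze.
    def lora_linear_forward(layer, x):
        result = layer.base_layer(x)                # guarded via ID_MATCH on base_layer
        lora_A = layer.lora_A["default_0"]          # ID_MATCH on the submodule objects
        lora_B = layer.lora_B["default_0"]
        dropout = layer.lora_dropout["default_0"]   # often nn.Identity when lora_dropout=0.0
        scaling = layer.scaling["default_0"]        # EQUALS_MATCH: == 1.0, folded as a constant
        x = x.to(lora_A.weight.dtype)               # ID_MATCH on lora_A['default_0'].weight
        return result + lora_B(lora_A(dropout(x))) * scaling

Because scaling is guarded with EQUALS_MATCH rather than treated symbolically, changing the LoRA scale at runtime would invalidate this guard set and force a recompile.
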
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.norm_added_k, 140581767536960) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
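
norm_added_k is an RMS-style norm; the EQUALS_MATCH on eps == 1e-06 exists because the constant from the guarded line (normalization.py:428) is baked into the compiled kernel. A minimal sketch of that computation (illustrative, not the diffusers source):

    import torch

    def rms_norm(hidden_states, weight=None, eps=1e-6):
        variance = hidden_states.pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + eps)  # the guarded line
        if weight is not None:   # the 'if self.weight is not None' branch, guarded by ID_MATCH
            hidden_states = hidden_states * weight
        return hidden_states
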
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.norm_added_k.weight, 140581772712736) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.norm_added_q, 140581767536864) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.norm_added_q.weight, 140581772711536) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.processor, 140581767536096) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
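
attn.heads is guarded with EQUALS_MATCH because head_dim = inner_dim // attn.heads (attention_processor.py:1721 in the log) must constant-fold for the compiled reshapes. A sketch of the shape logic, with illustrative dimensions:

    import torch

    def split_heads(x: torch.Tensor, heads: int = 24) -> torch.Tensor:
        batch, seq_len, inner_dim = x.shape
        head_dim = inner_dim // heads   # e.g. 3072 // 24 = 128 for Flux-sized dims (assumed)
        return x.view(batch, seq_len, heads, head_dim).transpose(1, 2)

The processor object itself gets both TYPE_MATCH and ID_MATCH: swapping in a different attention processor instance (e.g. via attn.set_processor) would fail the guard and recompile.
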
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1, accessed_by=DictGetItemGuardAccessor(norm1)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1, 140581767535664) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.norm, 140581767535808) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.silu, 140581767535712) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.linear, 140533120884768) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].norm1.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
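
norm1 follows the AdaLayerNormZero pattern that the cited lines (normalization.py:135-139) implement: a SiLU+Linear on the conditioning embedding produces shift/scale/gate chunks that modulate a non-affine LayerNorm. A hedged sketch with illustrative dimensions:

    import torch
    import torch.nn as nn

    class AdaLNZeroSketch(nn.Module):
        def __init__(self, dim: int = 3072):
            super().__init__()
            self.silu = nn.SiLU()
            self.linear = nn.Linear(dim, 6 * dim)   # the LoRA-wrapped linear in the log
            self.norm = nn.LayerNorm(dim, elementwise_affine=False, eps=1e-6)

        def forward(self, x, emb):
            emb = self.linear(self.silu(emb))       # normalization.py:137
            shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = emb.chunk(6, dim=1)
            x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # normalization.py:139
            return x, gate_msa, shift_mlp, scale_mlp, gate_mlp

Note that norm1.linear is itself a PEFT LoRA layer here, which is why the guard tree descends into its lora_A/lora_B submodules below.
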
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.linear.lora_A, 140533120879104) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.linear.lora_A['default_0'], 140533120880544) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.linear.lora_A['default_0'].weight, 140537312466144) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.linear.lora_B, 140533120890672) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.linear.lora_B['default_0'], 140533120881072) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
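
Every LoRA weight is pinned by object id (ID_MATCH on lora_A['default_0'].weight), not by value. A hedged illustration of the practical consequence (exact recompile behavior depends on the torch version):

    import torch
    import torch.nn as nn

    lin = nn.Linear(8, 8)
    compiled = torch.compile(lin)
    x = torch.randn(2, 8)
    compiled(x)                                    # compiles and installs guards

    with torch.no_grad():
        lin.weight.copy_(torch.randn(8, 8))        # same Parameter object: guards still pass
    compiled(x)                                    # reuses the compiled graph

    lin.weight = nn.Parameter(torch.randn(8, 8))   # new object id: identity guard fails
    compiled(x)                                    # may recompile (see TORCH_LOGS=recompiles)

For LoRA specifically, this is why hot-swapping adapter weights in place is compile-friendly, while loading a new adapter that rebinds the modules is not.
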
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.linear.base_layer, 140581767535760) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.linear.lora_dropout, 140533120883040) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.linear.lora_dropout['default_0'], 140533120883904) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].norm1.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].norm1.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].norm1.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].norm1.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].norm1.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].norm1.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[5].norm1.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
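
The merged_adapters guard is a LENGTH_CHECK on an empty list: PEFT's merge() folds the LoRA delta into the base weight and appends the adapter name, so bool(self.merged_adapters) flips and the whole guard set is invalidated. A hedged sketch of what a merge does (illustrative; the real implementation is the layer's merge() method):

    import torch

    def merge_default_0(layer):
        # 'layer' is assumed to be a PEFT lora.Linear-like wrapper (illustrative).
        lora_A = layer.lora_A["default_0"].weight    # (r, in_features)
        lora_B = layer.lora_B["default_0"].weight    # (out_features, r)
        delta_w = lora_B @ lora_A * layer.scaling["default_0"]
        with torch.no_grad():
            layer.base_layer.weight += delta_w
        layer.merged_adapters.append("default_0")    # merged() now returns True
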
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm2, accessed_by=DictGetItemGuardAccessor(norm2)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm2, 140581767537008) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm2.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context, 140581767537344) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net, accessed_by=DictGetItemGuardAccessor(net)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net, 140581767537488) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[5].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0], accessed_by=GetItemGuardAccessor(0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0], 140581767537440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
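
len(net) == 3 pins the structure of the feed-forward block whose "for module in self.net:" loop (attention.py:1200) unrolls at trace time: a GELU projection, a dropout, and an output linear. A hedged sketch with illustrative Flux-sized dimensions:

    import torch.nn as nn
    import torch.nn.functional as F

    class GELUProj(nn.Module):                      # stands in for diffusers' GELU block
        def __init__(self, dim_in, dim_out, approximate="tanh"):
            super().__init__()
            self.proj = nn.Linear(dim_in, dim_out)  # LoRA-wrapped in the log
            self.approximate = approximate          # guarded below: EQUALS_MATCH == 'tanh'

        def forward(self, x):
            return F.gelu(self.proj(x), approximate=self.approximate)

    net = nn.ModuleList([GELUProj(3072, 12288), nn.Dropout(0.0), nn.Linear(12288, 3072)])
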
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].proj, 140533118870880) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].ff_context.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_A, 140533118865312) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_A['default_0'], 140533119139648) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_A['default_0'].weight, 140537312134144) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_B, 140533118861712) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_B['default_0'], 140533119134512) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].proj.base_layer, 140581767537536) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
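
The recurring DICT_CONTAINS: not ___dict_contains('forward', ...) guards deserve a note: they verify that no one has monkeypatched forward onto the module instance, which would shadow the class-level forward that was traced. A small illustration:

    import torch.nn as nn

    m = nn.Linear(4, 4)
    assert "forward" not in m.__dict__   # forward lives on the class: guard passes

    m.forward = lambda x: x * 0          # instance attribute now shadows the class method
    assert "forward" in m.__dict__       # the DICT_CONTAINS guard would now fail
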
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_dropout, 140533118865936) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_dropout['default_0'], 140533118869776) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].ff_context.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].ff_context.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].ff_context.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].ff_context.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].ff_context.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].ff_context.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[5].ff_context.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +-
GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[1], 140581767537632) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[2], 140533119131056) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].ff_context.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[2].training, 140591004393408) # for 
module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[2].lora_A, 140533119134416) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[2].lora_A['default_0'], 140533119126976) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[2].lora_A['default_0'].weight, 140537312137104) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[2].lora_B, 140533119139168) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[2].lora_B['default_0'], 140533119128512) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[2].base_layer, 140581767537680) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[2].lora_dropout, 140533119139504) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[2].lora_dropout['default_0'], 140533119131488) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[5].ff_context.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].ff_context.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].ff_context.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].ff_context.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].ff_context.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].ff_context.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].ff_context.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[5].ff_context.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: 
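The ff_context.net[2] subtree above closes the guard block that every LoRA-wrapped Linear in this model emits: lora_A/lora_B module-dict entries pinned by ID_MATCH, scaling pinned by TYPE_MATCH, DICT_LENGTH, and an EQUALS_MATCH on the value 1.0, plus use_dora, merged_adapters, and the hook dicts. A minimal sketch of why these guards appear, using a hypothetical toy module rather than the real peft.tuners.lora.Linear (assumes PyTorch 2.1+ for torch._logging.set_logs; TORCH_LOGS=guards gives the same output):

    import torch

    class ToyLoraLinear(torch.nn.Module):
        # Hypothetical stand-in for a PEFT LoRA Linear: a base layer plus
        # per-adapter dicts that forward reads, which are exactly the
        # attributes guarded above.
        def __init__(self):
            super().__init__()
            self.base_layer = torch.nn.Linear(8, 8)
            self.lora_A = torch.nn.ModuleDict({"default_0": torch.nn.Linear(8, 4, bias=False)})
            self.lora_B = torch.nn.ModuleDict({"default_0": torch.nn.Linear(4, 8, bias=False)})
            self.scaling = {"default_0": 1.0}  # plain dict -> TYPE_MATCH / DICT_LENGTH / EQUALS_MATCH

        def forward(self, x):
            result = self.base_layer(x)
            for active_adapter in ("default_0",):
                lora_A = self.lora_A[active_adapter]    # -> ID_MATCH on the ModuleDict entry
                lora_B = self.lora_B[active_adapter]
                scaling = self.scaling[active_adapter]  # -> EQUALS_MATCH: scaling['default_0'] == 1.0
                result = result + lora_B(lora_A(x)) * scaling
            return result

    torch._logging.set_logs(guards=True)   # same effect as TORCH_LOGS=guards
    compiled = torch.compile(ToyLoraLinear())
    compiled(torch.randn(2, 8))            # the guard tree is printed after the first call

Because the scaling value itself is guarded, changing a LoRA scale (for example via lora_scale) invalidates the guard set and forces a recompile, which matches the EQUALS_MATCH ... == 1.0 entries seen throughout this dump.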
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context, 140581767535856) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.norm, 140581767536048) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.silu, 140581767535952) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.linear, 140533120880928) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].norm1_context.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.linear.lora_A, 140533120879536) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.linear.lora_A['default_0'], 140533120878768) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.linear.lora_A['default_0'].weight, 140537312461584) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.linear.lora_B, 140533120877664) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.linear.lora_B['default_0'], 140533120883136) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.linear.base_layer, 140581767536000) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.linear.lora_dropout, 140533120879680) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.linear.lora_dropout['default_0'], 140533120880496) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].norm1_context.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].norm1_context.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].norm1_context.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].norm1_context.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].norm1_context.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].norm1_context.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[5].norm1_context.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm2_context, 140581767537056) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
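That closes transformer_blocks[5]; the dump continues identically for transformer_blocks[6] below, so the guard count grows linearly with the number of blocks. Note the TENSOR_ALIASING entries on _active_adapter: PEFT shares a single _active_adapter object across all layers, and every layer re-checks that it is the same object as transformer_blocks[0].norm1.linear's. When the adapter setup is static, one way to eliminate this whole family of per-layer LoRA guards is to fuse the LoRA deltas into the base weights before compiling. A hedged sketch against the diffusers LoRA API (fuse_lora and unload_lora_weights are real pipeline methods; the LoRA path is a placeholder, and this trades guard overhead for losing the ability to rescale or swap adapters without recompiling):

    import torch
    from diffusers import FluxPipeline

    pipe = FluxPipeline.from_pretrained(
        "black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16
    ).to("cuda")
    pipe.load_lora_weights("path/to/lora")  # placeholder path
    pipe.fuse_lora()                # folds B @ A * scaling into the base weights
    pipe.unload_lora_weights()      # drops the PEFT wrapper modules entirely
    # The compiled transformer now guards only plain nn.Linear modules, so the
    # lora_A/lora_B/scaling/use_dora guard blocks above never get installed.
    pipe.transformer = torch.compile(pipe.transformer)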
GuardManager: source=L['self'].transformer_blocks[6], accessed_by=GetItemGuardAccessor(6) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6], 140581767535328) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff, accessed_by=DictGetItemGuardAccessor(ff) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff, 140581767538976) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net, 140581767539216) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[6].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0], 140581767539168) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0]._modules, 
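The ID_MATCH/TYPE_MATCH/LENGTH_CHECK trio on ff.net pins the container object, its exact type, and its length, because the `for module in self.net:` loop at attention.py:1200 was unrolled over exactly three entries at trace time. A rough reconstruction of the layout those guards describe, assuming the standard diffusers FeedForward arrangement (the class name and sizes below are illustrative, not taken from diffusers source):

```python
# Rough reconstruction of the structure the net[...] guards describe
# (an assumption inferred from the guard paths in this log):
import torch.nn as nn

class GELUProj(nn.Module):
    """net[0]: projection + tanh-approximated GELU (see the
    'approximate' guard further down in the log)."""
    def __init__(self, dim, inner_dim):
        super().__init__()
        self.proj = nn.Linear(dim, inner_dim)  # LoRA-wrapped in the log
        self.approximate = "tanh"

    def forward(self, x):
        return nn.functional.gelu(self.proj(x), approximate=self.approximate)

dim, inner_dim = 3072, 12288  # hypothetical sizes for illustration
net = nn.ModuleList([
    GELUProj(dim, inner_dim),   # net[0]
    nn.Dropout(0.0),            # net[1]: only a 'training' guard appears
    nn.Linear(inner_dim, dim),  # net[2]: LoRA-wrapped in the log
])
assert len(net) == 3  # the LENGTH_CHECK guard; the loop is unrolled over it
```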
accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].proj, 140533119326656) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].ff.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].proj.lora_A, 140533119327808) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[6].ff.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].proj.lora_A['default_0'], 140533119329200) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].proj.lora_A['default_0'].weight, 140537313925120) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].proj.lora_B, 140533119324688) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # 
peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].proj.lora_B['default_0'], 140533119326704) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].proj.base_layer, 140581767539264) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].proj.lora_dropout, 140533119327424) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].proj.lora_dropout['default_0'], 140533119326128) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].ff.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].ff.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- 
EQUALS_MATCH: L['self'].transformer_blocks[6].ff.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].ff.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].ff.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].ff.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[6].ff.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in 
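Taken together, the guards on lora_A, lora_B, base_layer, lora_dropout, scaling, use_dora, merged_adapters and _disable_adapters freeze every value the PEFT LoRA Linear forward reads at the source lines the log cites (peft/tuners/lora/layer.py:557-568). The EQUALS_MATCH on scaling['default_0'] == 1.0 is consistent with PEFT's scaling = lora_alpha / r when lora_alpha equals r. A condensed paraphrase of that forward path, based on the cited lines (a sketch, not the verbatim peft code):

```python
# Condensed paraphrase of the peft LoRA Linear forward path these
# guards specialize (source lines cited in the log above):
def lora_linear_forward(self, x, *args, **kwargs):
    result = self.base_layer(x, *args, **kwargs)      # layer.py:557
    for active_adapter in self.active_adapters:
        if active_adapter not in self.lora_A.keys():  # layer.py:560
            continue
        lora_A = self.lora_A[active_adapter]          # layer.py:562
        lora_B = self.lora_B[active_adapter]          # layer.py:563
        dropout = self.lora_dropout[active_adapter]   # layer.py:564
        scaling = self.scaling[active_adapter]        # layer.py:565
        x = x.to(lora_A.weight.dtype)                 # layer.py:566
        if not self.use_dora[active_adapter]:         # layer.py:568
            result = result + lora_B(lora_A(dropout(x))) * scaling
    return result
```

Because each of these reads is guarded (ID_MATCH on the modules, TYPE_MATCH/DICT_LENGTH/EQUALS_MATCH on the plain Python values), any adapter mutation, such as loading a second adapter or changing its scale, invalidates the entry and forces a recompile.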
disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[1], 140581767539312) # for module in self.net: #
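Two details worth noting in the span above: the EQUALS_MATCH specializes on the tanh-approximated GELU read at diffusers activations.py:83, and the TENSOR_ALIASING guard is a plain identity check asserting that this layer's _active_adapter is the very same object as the one on transformer_blocks[0].norm1.linear, so a single mutation is visible through every alias. Both conditions in miniature (the shared list below is a hypothetical stand-in):

```python
import torch
import torch.nn.functional as F

gate = torch.randn(2, 8)
out = F.gelu(gate, approximate="tanh")  # EQUALS_MATCH: approximate == 'tanh'

# TENSOR_ALIASING reduces to an 'is' check between two guarded sources:
shared_active_adapter = ["default_0"]   # hypothetical shared state
a = shared_active_adapter               # e.g. blocks[0].norm1.linear side
b = shared_active_adapter               # e.g. blocks[6].ff.net[0].proj side
assert a is b                           # what the guard verifies
```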
diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[2], 140533119773920) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].ff.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[2].lora_A, 140533119766576) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | 
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[2].lora_A['default_0'], 140533118932720) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[2].lora_A['default_0'].weight, 140537313922320) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[2].lora_B, 140533119772528) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[2].lora_B['default_0'], 140533118930992) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[2].base_layer, 140581767539360) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[2].lora_dropout, 140533119774784) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[2].lora_dropout['default_0'], 140533119775984) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].ff.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].ff.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].scaling['default_0'], 
accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].ff.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].ff.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].ff.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].ff.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[6].ff.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # 
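The merged_adapters and _disable_adapters guards freeze the adapter state the graph was traced under: LoRA weights unmerged and adapters enabled. Merging flips exactly the state the LENGTH_CHECK (`not ...merged_adapters`) asserts, so a merge (peft's merge_adapter, or diffusers' fuse_lora at the pipeline level) would invalidate this entry. A minimal sketch using peft's public API directly (assumes peft is installed; the toy Sequential stands in for the transformer):

```python
# Minimal sketch: merging populates merged_adapters, flipping the state
# the 'not merged_adapters' guard asserts.
import torch.nn as nn
from peft import LoraConfig, get_peft_model

base = nn.Sequential(nn.Linear(8, 8))
model = get_peft_model(base, LoraConfig(target_modules=["0"], r=4, lora_alpha=4))

layer = model.base_model.model[0]  # the LoRA-wrapped Linear
print(layer.merged_adapters)       # [] -> LENGTH_CHECK 'not merged' holds

model.merge_adapter()              # merge LoRA deltas into base weights
print(layer.merged_adapters)       # ['default'] -> the guard would now fail
```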
peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn, 140581767538208) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_k, 140533119589664) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_k.lora_A, 140533119596336) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[6].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_k.lora_A['default_0'], 140533119592256) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_k.lora_A['default_0'].weight, 140537314128688) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_k.lora_B, 140533119583568) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_k.lora_B['default_0'], 140533119582896) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_k.base_layer, 140581767538352) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[6].attn.to_k.lora_dropout, 140533119584960) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_k.lora_dropout['default_0'], 140533119586112) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[6].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | 
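
The guard block above walks the PEFT LoRA wrapper around attn.to_k attribute by attribute; each trailing `#` comment names the exact source line whose execution installed the guard. Assembled purely from those quoted lines (peft/tuners/lora/layer.py:557-568), the guarded forward path looks roughly like the sketch below. The loop over `self.active_adapters` is an assumption needed to bind `active_adapter`; this is a simplified reconstruction, not the actual PEFT implementation.

    # Simplified reconstruction of the PEFT LoRA Linear forward path, built
    # only from the source lines quoted in the guard comments above.
    def lora_forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)      # layer.py:557 -> ID_MATCH on base_layer
        for active_adapter in self.active_adapters:       # assumed loop binding active_adapter
            if active_adapter not in self.lora_A.keys():  # layer.py:560 -> ID_MATCH on lora_A
                continue
            lora_A = self.lora_A[active_adapter]          # layer.py:562
            lora_B = self.lora_B[active_adapter]          # layer.py:563
            dropout = self.lora_dropout[active_adapter]   # layer.py:564
            scaling = self.scaling[active_adapter]        # layer.py:565 -> EQUALS_MATCH == 1.0
            x = x.to(lora_A.weight.dtype)                 # layer.py:566 -> ID_MATCH on the A weight
            if not self.use_dora[active_adapter]:         # layer.py:568 -> use_dora['default_0'] is False
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Every attribute touched on this path (base_layer, lora_A, lora_B, lora_dropout, scaling, use_dora, plus the merged/disable_adapters properties) gets its own GuardManager node, which is why the same roughly twenty-guard pattern repeats for every wrapped projection in every block.
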
| | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_q, 140533119597392) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_q.lora_A, 140533119597296) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_q.lora_A['default_0'], 140533119597008) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_q.lora_A['default_0'].weight, 140537314124448) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | 
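
The large integers in the ID_MATCH/TYPE_MATCH guards are CPython `id()` values pinned at compile time. Two ids recur for every boolean attribute in this dump: 140591004393440 matches the False-valued flags (use_dora['default_0'], _disable_adapters) and 140591004393408 the True-valued ones, so they are evidently the `False` and `True` singletons of this process; 140591004466944 recurs on every scaling/use_dora TYPE_MATCH and evidently resolves to `dict`. A rough approximation of the helpers follows; the real implementations in torch/_dynamo/guards.py carry extra bookkeeping.

    # Approximate semantics of the guard helpers referenced above.
    def ___check_obj_id(obj, expected_id):       # ID_MATCH: object identity, not equality
        return id(obj) == expected_id

    def ___check_type_id(obj, expected_id):      # TYPE_MATCH: exact type, no subclasses
        return id(type(obj)) == expected_id

    # Consequence for the weight guard above (x = x.to(lora_A.weight.dtype)):
    # in-place mutation (weight.data.copy_(...)) keeps id(weight) stable and the
    # graph valid, while rebinding the attribute to a new nn.Parameter object
    # fails ID_MATCH and forces a recompile.
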
| | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_q.lora_B, 140533119595712) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_q.lora_B['default_0'], 140533119596912) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_q.base_layer, 140581767538448) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[6].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_q.lora_dropout, 140533119590768) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_q.lora_dropout['default_0'], 140533119596864) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[6].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | 
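
The scaling and use_dora guards on to_q mirror those on to_k: TYPE_MATCH pins the container to `dict`, DICT_LENGTH pins it to exactly one entry, and the 'default_0' values are pinned by EQUALS_MATCH (scaling == 1.0) and ID_MATCH (use_dora is False). Each pin is a recompile trigger. A hedged illustration, with the attribute path taken from the guard sources above and `transformer` standing in for the compiled FluxTransformer2DModel instance:

    # Hypothetical illustration of guard invalidation.
    to_q = transformer.transformer_blocks[6].attn.to_q
    to_q.scaling["default_0"] = 0.5   # breaks EQUALS_MATCH (== 1.0) -> recompile
    # Loading a second adapter breaks DICT_LENGTH (len == 1) on scaling/use_dora,
    # and enabling DoRA breaks the ID_MATCH pinning use_dora['default_0'] to
    # False - each such change compiles a new graph, e.g. [0/2] after this [0/1].
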
| | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_v, 140533119593120) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_v.lora_A, 140533119592784) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_v.lora_A['default_0'], 140533119592880) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | 
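
The TENSOR_ALIASING pair printed for to_q above (and earlier for to_k) asserts object identity across two guard sources rather than anything about tensor values: each wrapped layer's `_active_adapter` must be the very object held by transformer_blocks[0].norm1.linear. In other words, this dump shows PEFT sharing one active-adapter list across all tuner layers, and Dynamo encodes that sharing as an aliasing guard, emitted twice per site here. As a hedged paraphrase, with `transformer` the guarded model instance as before:

    # What each TENSOR_ALIASING guard above asserts.
    assert (transformer.transformer_blocks[0].norm1.linear._active_adapter
            is transformer.transformer_blocks[6].attn.to_q._active_adapter)
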
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_v.lora_A['default_0'].weight, 140537314123888) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_v.lora_B, 140533119592016) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_v.lora_B['default_0'], 140533119593936) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_v.base_layer, 140581767538544) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_v.lora_dropout, 140533119592208) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_v.lora_dropout['default_0'], 140533119593360) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- 
TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[6].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.norm_k, 140581767538400) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.norm_k.weight, 140581772709296) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.norm_q, 140581767538304) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[6].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.norm_q.weight, 140581783065344) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out, 
accessed_by=DictGetItemGuardAccessor(to_out) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out, 140581767538736) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[0], 140533119328768) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].attn.to_out[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[0].training, 140591004393408) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[0].lora_A, 
140533119328048) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[0].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[0].lora_A['default_0'], 140533119333856) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[0].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[0].lora_A['default_0'].weight, 140537313934080) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | 
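
Between the projection wrappers, the norm_q/norm_k guards above pin the RMSNorm hyperparameters: EQUALS_MATCH fixes eps == 1e-06 and ID_MATCH fixes the learnable weight object, with the two quoted lines (diffusers normalization.py:428 and :430) locating the guarded computation. Reconstructed around those two lines; the variance computation is an assumption from standard RMSNorm, and dtype handling is omitted:

    # Approximate RMSNorm forward body; only :428 and :430 are taken from the
    # guard comments, the rest is assumed.
    variance = hidden_states.pow(2).mean(-1, keepdim=True)             # assumed
    hidden_states = hidden_states * torch.rsqrt(variance + self.eps)   # normalization.py:428
    if self.weight is not None:                                        # normalization.py:430
        hidden_states = hidden_states * self.weight                    # dtype handling omitted
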
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[0].lora_B, 140533119329008) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[0].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[0].lora_B['default_0'], 140533119335584) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[0].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[0].base_layer, 140581767538784) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[0].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[0].lora_dropout, 140533119328816) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[0].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[0].lora_dropout['default_0'], 140533119327664) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[0].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: 
___check_type_id(L['self'].transformer_blocks[6].attn.to_out[0].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].attn.to_out[0].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].attn.to_out[0].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.to_out[0].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].attn.to_out[0].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[0].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.to_out[0].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[6].attn.to_out[0].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[0]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[1], accessed_by=GetItemGuardAccessor(1) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[1], 140581767538832) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # 
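
The subtree above is the complete guard set Dynamo installs for one LoRA-wrapped Linear, attn.to_out[0]: ID_MATCH guards pin lora_A/lora_B/lora_dropout and their 'default_0' entries to specific objects, EQUALS_MATCH pins scaling['default_0'] to 1.0, and LENGTH_CHECK asserts merged_adapters is empty. Each guarded attribute is one that peft's lora.Linear.forward reads, per the source lines quoted in the trailing comments. A minimal control-flow sketch assembled from those quoted lines (peft/tuners/lora/layer.py:557-568); the final update expression is a paraphrase of peft's non-DoRA branch and is not itself quoted in this log:

def lora_linear_forward(layer, x, *args, **kwargs):
    # Sketch only: mirrors the peft code paths cited by the guards above.
    result = layer.base_layer(x, *args, **kwargs)        # layer.py:557
    for active_adapter in layer.active_adapters:
        if active_adapter not in layer.lora_A.keys():    # layer.py:560
            continue
        lora_A = layer.lora_A[active_adapter]            # layer.py:562
        lora_B = layer.lora_B[active_adapter]            # layer.py:563
        dropout = layer.lora_dropout[active_adapter]     # layer.py:564
        scaling = layer.scaling[active_adapter]          # layer.py:565
        x = x.to(lora_A.weight.dtype)                    # layer.py:566
        if not layer.use_dora[active_adapter]:           # layer.py:568
            # Paraphrased non-DoRA update: result += B(A(dropout(x))) * scaling
            result = result + lora_B(lora_A(dropout(x))) * scaling
    return result

Because every attribute read on this path is guarded, the same pattern repeats below for add_k_proj, add_q_proj, and add_v_proj; to_out[1], a plain dropout module, only receives identity and training guards (immediately below).
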
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[1], 140581767538832) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_k_proj, 140533119598016) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].attn.add_k_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_k_proj.training, 140591004393408) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_k_proj.lora_A, 140533119597632) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_k_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_k_proj.lora_A['default_0'], 140533119440384) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_k_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_k_proj.lora_A['default_0'].weight, 140537314115968) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_k_proj.lora_B, 140533119597872) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_k_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_k_proj.lora_B['default_0'], 140533119438320) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_k_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_k_proj.base_layer, 140581767538592) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_k_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_k_proj.lora_dropout, 140533119593792) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_k_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_k_proj.lora_dropout['default_0'], 140533119597824) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_k_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.add_k_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].attn.add_k_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].attn.add_k_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.add_k_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].attn.add_k_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_k_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.add_k_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[6].attn.add_k_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_k_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
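
add_k_proj carries the same guard pattern as to_out[0] above. The recurring primitives read roughly as follows; this is an illustrative sketch of their semantics, not torch's actual implementations (those live in torch/_dynamo/guards.py and its C++ guard accessors). One observation consistent with the dump: Module.training is a plain bool, so the two object ids that recur on every .training guard (140591004393408 and 140591004393440) are presumably the CPython True/False singletons.

# Approximate semantics of the guard kinds in this dump (illustrative only):
def id_match(obj, expected_id):          # ID_MATCH via ___check_obj_id
    return id(obj) == expected_id        # must be the very same Python object

def type_match(obj, expected_type_id):   # TYPE_MATCH via ___check_type_id
    return id(type(obj)) == expected_type_id  # exact type; a subclass fails

def equals_match(value, expected):       # EQUALS_MATCH, e.g. scaling['default_0'] == 1.0
    return value == expected             # compared by value, not identity

def dict_length(d, n):                   # DICT_LENGTH, e.g. len(use_dora) == 1
    return len(d) == n

def length_check_empty(container):       # LENGTH_CHECK as used here: not merged_adapters
    return not container

def aliasing(a, b):                      # TENSOR_ALIASING: two sources, one object
    return a is b

If any of these predicates fails on a later call, the compiled graph is rejected and Dynamo recompiles, so swapping adapter modules, changing a scaling value, or toggling .training invalidates this cache entry.
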
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_q_proj, 140533119447872) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].attn.add_q_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_q_proj.training, 140591004393408) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_q_proj.lora_A, 140533119443600) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_q_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_q_proj.lora_A['default_0'], 140533119327136) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_q_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_q_proj.lora_A['default_0'].weight, 140537313932800) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_q_proj.lora_B, 140533119442928) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_q_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_q_proj.lora_B['default_0'], 140533119328960) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_q_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_q_proj.base_layer, 140581767538688) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_q_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_q_proj.lora_dropout, 140533119449024) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_q_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_q_proj.lora_dropout['default_0'], 140533119448976) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_q_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.add_q_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].attn.add_q_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].attn.add_q_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.add_q_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].attn.add_q_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_q_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.add_q_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[6].attn.add_q_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_q_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
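
add_q_proj ends, like every wrapped layer here, with a TENSOR_ALIASING pair pointing back at transformer_blocks[0].norm1.linear: in this trace all LoRA layers share one _active_adapter object, so each additional layer only needs an identity check against the first occurrence. The call sites these q/k/v and to_out guards anchor to are the quoted diffusers lines; assembled for reference (diffusers/src/diffusers/models/attention_processor.py, lines :1735-:1737 and :1778 as cited above; surrounding context omitted, and the to_out[0] line is inferred from the adjacent code rather than quoted in this section):

# Joint-attention projections of the encoder stream (quoted at :1735-:1737):
encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states)  # :1735
encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states)    # :1736
encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states)  # :1737
# ... scaled dot-product attention over the joined streams ...
hidden_states = attn.to_out[0](hidden_states)  # LoRA-wrapped out-projection (inferred)
hidden_states = attn.to_out[1](hidden_states)  # dropout, quoted at :1778

The add_v_proj subtree, identical in shape, follows below.
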
| | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[6].attn.add_q_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_q_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_v_proj, 140533119448832) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].attn.add_v_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[6].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_v_proj.training, 140591004393408) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_v_proj.lora_A, 140533119448448) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_v_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_v_proj.lora_A['default_0'], 140533119446144) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_v_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_v_proj.lora_A['default_0'].weight, 140537313926000) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_v_proj.lora_B, 140533119448496) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_v_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_v_proj.lora_B['default_0'], 140533119445808) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_v_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # 
peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_v_proj.base_layer, 140581767538640) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_v_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_v_proj.lora_dropout, 140533119442160) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_v_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_v_proj.lora_dropout['default_0'], 140533119440672) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[6].attn.add_v_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_v_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.add_v_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].attn.add_v_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].attn.add_v_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.add_v_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].attn.add_v_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_v_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.add_v_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[6].attn.add_v_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_v_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out, 
accessed_by=DictGetItemGuardAccessor(to_add_out) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_add_out, 140533119328144) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].attn.to_add_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_add_out.training, 140591004393408) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_add_out.lora_A, 140533119327328) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_add_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[6].attn.to_add_out.lora_A['default_0'], 140533119326512) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_add_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_add_out.lora_A['default_0'].weight, 140537313927840) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_add_out.lora_B, 140533119325504) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_add_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_add_out.lora_B['default_0'], 140533119326224) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_add_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_add_out.base_layer, 140581767538880) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_add_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_add_out.lora_dropout, 140533119330592) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.lora_dropout.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_add_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_add_out.lora_dropout['default_0'], 140533119330640) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_add_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.to_add_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].attn.to_add_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].attn.to_add_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | 
| +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.to_add_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].attn.to_add_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_add_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.to_add_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[6].attn.to_add_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_add_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.norm_added_k, 140581767539024) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[6].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.norm_added_k.weight, 140581772709936) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.norm_added_q, 140581767538928) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: 
L['self'].transformer_blocks[6].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.norm_added_q.weight, 140581772709216) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.processor, 140581767538160) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[6].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1, accessed_by=DictGetItemGuardAccessor(norm1) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1, 140581767537728) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.norm, 140581767537872) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.silu, 140581767537776) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.linear, 140533119125152) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].norm1.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.linear.lora_A, 140533118902976) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.linear.lora_A['default_0'], 140533118903552) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.linear.lora_A['default_0'].weight, 140537314130528) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.linear.lora_B, 140533118908448) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.linear.lora_B['default_0'], 140533118909120) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.linear.base_layer, 140581767537824) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.linear.lora_dropout, 140533118897984) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.linear.lora_dropout['default_0'], 140533118895824) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.lora_dropout['default_0'].__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].norm1.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].norm1.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].norm1.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].norm1.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].norm1.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[6].norm1.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].norm1.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[6].norm1.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | 
+- GuardManager: source=L['self'].transformer_blocks[6].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm2, accessed_by=DictGetItemGuardAccessor(norm2) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm2, 140581767539072) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm2.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context, 140581767539408) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # 
diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net, 140581767539552) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[6].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[0], 140581767539504) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 
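For readers skimming the tree, the guard kinds above reduce to small Python predicates. The sketch below is a reading aid under that assumption, not Dynamo's implementation; `check_obj_id`/`check_type_id` are stand-ins for the `___check_obj_id`/`___check_type_id` helpers the log quotes.

```python
def check_obj_id(obj, expected_id):
    # ID_MATCH: the very same Python object (a module, a bool singleton, ...)
    return id(obj) == expected_id

def check_type_id(obj, expected_id):
    # TYPE_MATCH: exact type identity; a subclass fails the guard
    return id(type(obj)) == expected_id

def dict_not_contains(key, d):
    # DICT_CONTAINS (negated form above): no per-instance 'forward' override
    return key not in d

# LENGTH_CHECK / DICT_LENGTH compare len(...) against a recorded constant,
# and EQUALS_MATCH compares a value against a baked-in constant such as 1.0.
```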
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[0].proj, 140533118938672) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].ff_context.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[0].proj.lora_A, 140533118932576) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[0].proj.lora_A['default_0'], 140533118937520) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[0].proj.lora_A['default_0'].weight, 140537313802688) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[0].proj.lora_B, 140533118937664) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[0].proj.lora_B['default_0'], 140533118931232) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[0].proj.base_layer, 140581767539600) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
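The lora_A/lora_B/base_layer guards above all point into the same few lines of peft/tuners/lora/layer.py (557-568). A simplified paraphrase of that forward path, assuming a single active adapter and no DoRA, shows why each of these attributes ends up guarded:

```python
# Paraphrase of the peft LoRA Linear.forward path quoted in the guard comments;
# simplified for illustration, not the library's exact code.
def lora_linear_forward(self, x, *args, **kwargs):
    result = self.base_layer(x, *args, **kwargs)        # layer.py:557
    for active_adapter in self.active_adapters:
        if active_adapter not in self.lora_A.keys():    # layer.py:560
            continue
        lora_A = self.lora_A[active_adapter]            # layer.py:562
        lora_B = self.lora_B[active_adapter]            # layer.py:563
        dropout = self.lora_dropout[active_adapter]     # layer.py:564
        scaling = self.scaling[active_adapter]          # layer.py:565
        x = x.to(lora_A.weight.dtype)                   # layer.py:566
        if not self.use_dora[active_adapter]:           # layer.py:568
            result = result + lora_B(lora_A(dropout(x))) * scaling
    return result
```

Every attribute read on that path (the ModuleDicts, the 'default_0' entries, the scaling and use_dora dicts, the merged/disable flags) becomes its own guard, which is why a single LoRA-wrapped Linear contributes this many tree nodes.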
___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[0].proj.lora_dropout, 140533118930944) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[0].proj.lora_dropout['default_0'], 140533118935984) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | 
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].ff_context.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].ff_context.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].ff_context.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].ff_context.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].ff_context.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: 
___check_type_id(L['self'].transformer_blocks[6].ff_context.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[6].ff_context.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- 
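The EQUALS_MATCH on `approximate == 'tanh'` above comes from the GELU block in diffusers/src/diffusers/models/activations.py: a plain string attribute read inside forward gets specialized to a constant, so Dynamo guards its value. A minimal equivalent of the guarded call, with a made-up tensor shape for illustration:

```python
import torch
import torch.nn.functional as F

# Stand-in for the projected hidden states from self.proj(hidden_states)
# (activations.py:88); the real shape depends on the model.
gate = torch.randn(2, 16, 64)
out = F.gelu(gate, approximate="tanh")  # guarded: self.approximate == 'tanh'
```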
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[1], 140581767539696) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[2], 140533118929120) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].ff_context.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[2].lora_A, 140533118930512) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[2].lora_A['default_0'], 140533118931136) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[2].lora_A['default_0'].weight, 140537313802208) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[2].lora_B, 140533118935504) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[2].lora_B['default_0'], 140533118937472) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[2].base_layer, 140581767539744) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[2].lora_dropout, 140533118928832) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[2].lora_dropout['default_0'], 140533118937904) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].ff_context.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].ff_context.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].ff_context.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].ff_context.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].ff_context.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].ff_context.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[6].ff_context.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
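The TENSOR_ALIASING pairs above reduce to an `is` check between two sources: the PEFT wrapper layers share a single `_active_adapter` object, so Dynamo records the aliasing once instead of re-guarding each copy independently. To regenerate a dump in this format (PyTorch 2.x logging API; the script name below is illustrative):

```python
# Either set the environment variable before launching:
#   TORCH_LOGS="guards" python compile_flux_lora.py
# or enable the guards artifact from Python before the compiled call:
import torch._logging

torch._logging.set_logs(guards=True)  # prints the TREE_GUARD_MANAGER tree on compile
```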
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context, 140581767537920) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.norm, 140581767538112) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.silu, 140581767538016) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.linear, 140533118908304) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].norm1_context.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.linear.lora_A, 140533118904992) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.linear.lora_A['default_0'], 140533119590528) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.linear.lora_A['default_0'].weight, 140537314131248) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.linear.lora_B, 140533118901392) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.linear.lora_B['default_0'], 140533119594272) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.linear.base_layer, 140581767538064) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.linear.lora_dropout, 140533118910416) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.linear.lora_dropout['default_0'], 140533118900528) # 
dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].norm1_context.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].norm1_context.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].norm1_context.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].norm1_context.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].norm1_context.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].norm1_context.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[6].norm1_context.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].transformer_blocks[6].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm2_context, 140581767539120) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- 
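The subtree above closes out transformer_blocks[6]; the dump continues below with the identically shaped subtree for transformer_blocks[7]. Every GuardManager node corresponds to one attribute or dict access that Dynamo traced through peft's LoRA forward (base_layer, lora_A, lora_B, lora_dropout, scaling, use_dora), and the leaf checks (ID_MATCH, TYPE_MATCH, DICT_LENGTH, EQUALS_MATCH, LENGTH_CHECK, TENSOR_ALIASING) are re-evaluated on every call of the compiled transformer; if any of them fails, the frame recompiles. The following minimal sketch (an assumed reproduction, not the script that produced this log; LoraLike and its attribute names are illustrative only) shows how a LoRA-style module that reads plain Python dicts in forward yields this family of guards:

import torch
import torch.nn as nn

# Print the TREE_GUARD_MANAGER for each compiled frame, as in this log
# (equivalent to running under TORCH_LOGS="guards").
torch._logging.set_logs(guards=True)

class LoraLike(nn.Module):
    # Toy stand-in for peft's lora.Linear: forward reads plain Python
    # dicts keyed by adapter name, so Dynamo should install TYPE_MATCH /
    # DICT_LENGTH guards on the dict and an EQUALS_MATCH guard on the
    # float, mirroring the scaling['default_0'] == 1.0 guard above.
    def __init__(self):
        super().__init__()
        self.base_layer = nn.Linear(8, 8)
        self.lora_A = nn.Linear(8, 4, bias=False)
        self.lora_B = nn.Linear(4, 8, bias=False)
        self.scaling = {"default_0": 1.0}

    def forward(self, x):
        lora = self.lora_B(self.lora_A(x)) * self.scaling["default_0"]
        return self.base_layer(x) + lora

m = torch.compile(LoraLike())
m(torch.randn(2, 8))          # first call compiles and prints the guard tree
m.scaling["default_0"] = 2.0  # invalidates the EQUALS_MATCH guard
m(torch.randn(2, 8))          # this call should trigger a recompile

Because scaling holds plain Python floats, changing an adapter's scale is a guard failure rather than a new graph input, which is why the log pins the exact value 1.0 in every block.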
| | | | | +- GuardManager: source=L['self'].transformer_blocks[7], accessed_by=GetItemGuardAccessor(7)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7], 140581767537392) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff, accessed_by=DictGetItemGuardAccessor(ff)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff, 140581767541040) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net, 140581767541280) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[7].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0], 140581767541232) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].proj, 140533119825088) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].ff.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].proj.lora_A, 140533119827056) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].proj.lora_A['default_0'], 140533119820240) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].proj.lora_A['default_0'].weight, 140537313468608) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].proj.lora_B, 140533119827680) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].proj.lora_B['default_0'], 140533119819712) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].proj.base_layer, 140581767541328) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].proj.lora_dropout, 140533119816592) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].proj.lora_dropout['default_0'], 140533119822544) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].ff.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].ff.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].ff.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].ff.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].ff.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].ff.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[7].ff.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[1], 140581767541376) # for module in self.net: # 
diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[2], 140533119827728) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].ff.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[2].lora_A, 140533119826624) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | 
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[2].lora_A['default_0'], 140533118992832) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[2].lora_A['default_0'].weight, 140537313476048) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[2].lora_B, 140533119819520) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[2].lora_B['default_0'], 140533119002384) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[2].base_layer, 140581767541424) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[2].lora_dropout, 140533119823360) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[2].lora_dropout['default_0'], 140533119822160) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].ff.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].ff.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].ff.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].ff.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].ff.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].ff.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[7].ff.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn, 140581767540272) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
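Note: the guards above cover the LoRA-wrapped ff.net[2] Linear of transformer block 7; the attn guards that follow repeat the same pattern for to_q/to_k/to_v. Every attribute and dict lookup that peft's LoRA forward performs at trace time becomes one of these guards, which is why the tree repeats per layer. Below is a minimal sketch of that forward path, reconstructed only from the source comments embedded in the guards (peft/tuners/lora/layer.py:557-568 in this environment); the installed peft version may differ, and the final accumulation line is an assumption, since it is not visible in the guard comments.

    import torch

    def lora_linear_forward(self, x: torch.Tensor, *args, **kwargs) -> torch.Tensor:
        # layer.py:557 -- guarded via ID_MATCH on base_layer and its .training flag
        result = self.base_layer(x, *args, **kwargs)
        for active_adapter in self.active_adapters:
            # layer.py:560 -- guarded via ID_MATCH on the lora_A ModuleDict
            if active_adapter not in self.lora_A.keys():
                continue
            lora_A = self.lora_A[active_adapter]         # layer.py:562
            lora_B = self.lora_B[active_adapter]         # layer.py:563
            dropout = self.lora_dropout[active_adapter]  # layer.py:564
            scaling = self.scaling[active_adapter]       # layer.py:565 (EQUALS_MATCH == 1.0 above)
            x = x.to(lora_A.weight.dtype)                # layer.py:566 (ID_MATCH on .weight)
            if not self.use_dora[active_adapter]:        # layer.py:568 (TYPE_MATCH + DICT_LENGTH)
                # assumed accumulation step, not shown in the guard comments:
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Each dict access (lora_A['default_0'], scaling['default_0'], ...) shows up above as its own GuardManager plus an ID_MATCH/EQUALS_MATCH leaf, so the guard count scales with blocks x projections x adapters.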
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_k, 140533119034048) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_k.lora_A, 140533119034288) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_k.lora_A['default_0'], 140533119148832) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_k.lora_A['default_0'].weight, 140537313791568) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_k.lora_B, 140533119030736) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_k.lora_B['default_0'], 140533119145280) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_k.base_layer, 140581767540416) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_k.lora_dropout, 140533119029248) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_k.lora_dropout['default_0'], 140533119034432) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[7].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
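Note: every ID_MATCH/TYPE_MATCH line above reduces to a CPython identity check: ___check_obj_id compares id(obj) against the id recorded when this frame was compiled, and ___check_type_id does the same for id(type(obj)). That is also why only two distinct constants (140591004393408 and 140591004393440) appear for all the .training flags; they are consistent with the two interned bool singletons. A self-contained illustration of the semantics (the real checks run in C++ inside the TREE_GUARD_MANAGER):

    def check_obj_id(obj, expected_id):        # ID_MATCH / ___check_obj_id
        return id(obj) == expected_id

    def check_type_id(obj, expected_id):       # TYPE_MATCH / ___check_type_id
        return id(type(obj)) == expected_id

    flag = True
    recorded = id(flag)                        # captured at compile time of frame [0/1]
    assert check_obj_id(flag, recorded)        # holds while the flag is unchanged
    assert not check_obj_id(False, recorded)   # a train()/eval() flip -> guard miss -> recompile
    assert check_type_id({"default_0": 1.0}, id(dict))  # cf. the scaling TYPE_MATCH + DICT_LENGTH pair

Because the guards pin the identity of every lora_A/lora_B submodule and parameter, replacing any of them (for example, loading a different adapter into the same slot) invalidates the compiled frame even if the shapes are identical.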
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_q, 140533119033568) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_q.lora_A, 140533119031456) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_q.lora_A['default_0'], 140533119039856) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_q.lora_A['default_0'].weight, 140537313794368) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_q.lora_B, 140533119035008) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_q.lora_B['default_0'], 140533119029392) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_q.base_layer, 140581767540512) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_q.lora_dropout, 140533119034720) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_q.lora_dropout['default_0'], 140533119031600) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[7].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_v, 140533119155792) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
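Note: the paired TENSOR_ALIASING entries (each emitted twice in this dump) are identity guards, not value guards. They assert that every LoRA layer's _active_adapter is literally the same Python object as the anchor captured at transformer_blocks[0].norm1.linear, so the guard holds by aliasing rather than by comparing values. Rebinding that attribute to an equal-but-new object would fail the guard even though nothing observable changed. Illustration with hypothetical values (peft stores the active adapter name(s) per layer):

    anchor = ["default_0"]         # the object all layers currently share
    aliased = anchor               # same object  -> the 'is' guard holds
    rebuilt = ["default_0"]        # equal value, but a fresh object

    assert aliased is anchor       # aliasing guard passes
    assert rebuilt == anchor       # equality would pass...
    assert rebuilt is not anchor   # ...but identity fails -> guard miss -> recompile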
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_v.lora_A, 140533119156032) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_v.lora_A['default_0'], 140533119148208) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_v.lora_A['default_0'].weight, 140537313622704) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_v.lora_B, 140533119152000) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_v.lora_B['default_0'], 140533119150704) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_v.base_layer, 140581767540608) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_v.lora_dropout, 140533119155936) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_v.lora_dropout['default_0'], 140533119155648) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- 
TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[7].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.norm_k, 140581767540464) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 
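The entries above pin down every attribute read in PEFT's LoRA dispatch for attn.to_v: ID_MATCH freezes each submodule and flag to a concrete object identity, TYPE_MATCH and DICT_LENGTH freeze the per-adapter dicts, and EQUALS_MATCH freezes the scaling value. A minimal sketch of the code path the guard comments cite (peft/tuners/lora/layer.py:557-568 in this trace); only the lines tagged with a layer.py number are quoted by the guards, the loop and the final update line are assumptions based on standard PEFT LoRA behavior:

    def lora_linear_forward(layer, x, *args, **kwargs):
        # Sketch, not the verbatim PEFT source. `layer` stands for a
        # LoRA-wrapped Linear such as attn.to_v guarded above.
        result = layer.base_layer(x, *args, **kwargs)          # layer.py:557
        for active_adapter in layer.active_adapters:           # assumed loop
            if active_adapter not in layer.lora_A.keys():      # layer.py:560
                continue
            lora_A = layer.lora_A[active_adapter]              # layer.py:562
            lora_B = layer.lora_B[active_adapter]              # layer.py:563
            dropout = layer.lora_dropout[active_adapter]       # layer.py:564
            scaling = layer.scaling[active_adapter]            # layer.py:565
            x = x.to(lora_A.weight.dtype)                      # layer.py:566
            if not layer.use_dora[active_adapter]:             # layer.py:568
                # assumed: the plain-LoRA branch
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Because each of these lookups is guarded individually, swapping, merging, or re-adding an adapter changes an object identity and would invalidate these guards and trigger a recompile.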
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.norm_k, 140581767540464) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.norm_k.weight, 140581785355344) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.norm_q, 140581767540368) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.norm_q.weight, 140581773232464) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
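The norm_q and norm_k guards reference diffusers' RMSNorm forward, with eps pinned to 1e-06 by EQUALS_MATCH and the weight pinned by ID_MATCH. A rough, self-contained sketch of that forward; only the two lines marked :428 and :430 are quoted by the guard comments, the variance computation is an assumption based on standard RMSNorm:

    import torch

    def rms_norm(hidden_states, weight=None, eps=1e-6):
        # eps=1e-6 matches the EQUALS_MATCH guards above
        variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + eps)  # normalization.py:428
        if weight is not None:                                       # normalization.py:430
            hidden_states = hidden_states.to(weight.dtype) * weight
        return hidden_states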
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out, 140581767540800) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[0], 140533119256224) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].attn.to_out[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[0].training, 140591004393408) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[0].lora_A, 140533119270384) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[0].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[0].lora_A['default_0'], 140533119265584) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[0].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[0].lora_A['default_0'].weight, 140537313621184) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[0].lora_B, 140533119266304) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[0].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[0].lora_B['default_0'], 140533119268800) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[0].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[0].base_layer, 140581767540848) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[0].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[0].lora_dropout, 140533119258240) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[0].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[0].lora_dropout['default_0'], 140533119257952) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[0].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.to_out[0].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].attn.to_out[0].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].attn.to_out[0].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.to_out[0].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].attn.to_out[0].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[0].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.to_out[0].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[7].attn.to_out[0].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[0]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[1], 140581767540896) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
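Every scaling['default_0'] entry in this dump is guarded with EQUALS_MATCH ... == 1.0, i.e. the adapter was loaded with lora_alpha equal to its rank. For reference, PEFT derives that multiplier roughly as below; the rslora branch is an assumption about an optional flag, not something visible in this log, and the rank 16 in the assert is purely illustrative:

    import math

    def lora_scaling(lora_alpha: float, r: int, use_rslora: bool = False) -> float:
        # stored per adapter in layer.scaling[adapter_name]
        return lora_alpha / math.sqrt(r) if use_rslora else lora_alpha / r

    assert lora_scaling(16, 16) == 1.0  # consistent with the EQUALS_MATCH guards above

Because EQUALS_MATCH bakes the float value into the guard, loading a LoRA with a different alpha/rank ratio would miss this cache entry and trigger a recompile rather than reuse the graph.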
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_k_proj, 140533119723952) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].attn.add_k_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_k_proj.training, 140591004393408) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_k_proj.lora_A, 140533119723520) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_k_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_k_proj.lora_A['default_0'], 140533119716560) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_k_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_k_proj.lora_A['default_0'].weight, 140537313623504) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_k_proj.lora_B, 140533119717616) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_k_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_k_proj.lora_B['default_0'], 140533119727744) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_k_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_k_proj.base_layer, 140581767540656) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_k_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_k_proj.lora_dropout, 140533119715264) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_k_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_k_proj.lora_dropout['default_0'], 140533119720928) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_k_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.add_k_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].attn.add_k_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].attn.add_k_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.add_k_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].attn.add_k_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_k_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.add_k_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[7].attn.add_k_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_k_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
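The TENSOR_ALIASING entries assert object identity rather than value: in this trace, every LoRA submodule's _active_adapter is literally the same Python object as transformer_blocks[0].norm1.linear._active_adapter, so one cheap `is` check covers the whole family. A toy illustration of the property being guarded (all names hypothetical, not the PEFT source):

    shared_active_adapter = ["default_0"]  # one container shared by all layers

    class WrappedLinear:  # stand-in for a PEFT-wrapped module
        def __init__(self, active):
            self._active_adapter = active

    a = WrappedLinear(shared_active_adapter)
    b = WrappedLinear(shared_active_adapter)
    assert a._active_adapter is b._active_adapter  # what TENSOR_ALIASING checks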
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_q_proj, 140533119255840) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].attn.add_q_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_q_proj.training, 140591004393408) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_q_proj.lora_A, 140533119262464) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_q_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_q_proj.lora_A['default_0'], 140533119270096) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_q_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_q_proj.lora_A['default_0'].weight, 140537313622784) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_q_proj.lora_B, 140533119265968) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_q_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_q_proj.lora_B['default_0'], 140533119265536) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_q_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_q_proj.base_layer, 140581767540752) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_q_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_q_proj.lora_dropout, 140533119269136) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_q_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_q_proj.lora_dropout['default_0'], 140533119269952) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_q_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.add_q_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].attn.add_q_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].attn.add_q_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.add_q_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].attn.add_q_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_q_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.add_q_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | |
| | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[7].attn.add_q_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_q_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_v_proj, 140533119726400) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].attn.add_v_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
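The subtree above is the complete guard set Dynamo installs for one PEFT-wrapped projection (`add_q_proj`): the `lora_A`/`lora_B` containers, each `default_0` sub-module, its `training` flag, and its `weight` parameter are all pinned by `ID_MATCH` against a fixed object id. A hedged toy sketch of the consequence (assumed module and names, not code from this trace): replacing any of those objects after compilation changes its id, fails the guard, and forces a retrace.

```python
# Hedged toy repro (assumption, not from this log): ID_MATCH specializes the
# compiled graph on object identity, so swapping a LoRA sub-module recompiles.
import torch
import torch.nn as nn

class ToyLoRALinear(nn.Module):
    def __init__(self, dim=8, rank=2):
        super().__init__()
        self.base_layer = nn.Linear(dim, dim)
        self.lora_A = nn.ModuleDict({"default_0": nn.Linear(dim, rank, bias=False)})
        self.lora_B = nn.ModuleDict({"default_0": nn.Linear(rank, dim, bias=False)})
        self.scaling = {"default_0": 1.0}      # cf. the EQUALS_MATCH == 1.0 guards
        self.use_dora = {"default_0": False}   # cf. the use_dora ID_MATCH guards
        self._active_adapter = ["default_0"]

    def forward(self, x):
        result = self.base_layer(x)
        for name in self._active_adapter:
            if not self.use_dora[name]:        # plain-Python branch -> guard
                result = result + self.lora_B[name](self.lora_A[name](x)) * self.scaling[name]
        return result

layer = ToyLoRALinear()
compiled = torch.compile(layer)
x = torch.randn(4, 8)
compiled(x)  # first call: trace + install guards like the ones above
layer.lora_A["default_0"] = nn.Linear(8, 2, bias=False)  # new object id
compiled(x)  # ID_MATCH on lora_A['default_0'] fails -> silent recompile
```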
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_v_proj, 140533119726400) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].attn.add_v_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_v_proj.training, 140591004393408) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_v_proj.lora_A, 140533119258000) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_v_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_v_proj.lora_A['default_0'], 140533119269232) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_v_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_v_proj.lora_A['default_0'].weight, 140537313619824) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_v_proj.lora_B, 140533119262944) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_v_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_v_proj.lora_B['default_0'], 140533119268992) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_v_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_v_proj.base_layer, 140581767540704) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_v_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_v_proj.lora_dropout, 140533119254976) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_v_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_v_proj.lora_dropout['default_0'], 140533119719968) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_v_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.add_v_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].attn.add_v_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].attn.add_v_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.add_v_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].attn.add_v_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_v_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.add_v_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[7].attn.add_v_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_v_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
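Worth noting in both subtrees so far: the adapter scale is a plain Python float, so `EQUALS_MATCH: ... scaling['default_0'] == 1.0` bakes the exact value 1.0 into the compiled graph for every LoRA layer. A hedged sketch of the effect (toy function and names assumed, not the PEFT implementation):

```python
# Sketch: Dynamo constant-folds Python floats it reads during tracing and
# guards on their exact value, so changing the scale triggers a retrace.
import torch

scaling = {"default_0": 1.0}

def apply_lora(x, lora_out):
    # scaling["default_0"] becomes a graph constant, protected by a guard
    # analogous to the EQUALS_MATCH(... == 1.0) entries above.
    return x + lora_out * scaling["default_0"]

compiled = torch.compile(apply_lora)
x, l = torch.randn(4), torch.randn(4)
compiled(x, l)               # traced with scale == 1.0
scaling["default_0"] = 0.5   # the == 1.0 guard now fails
compiled(x, l)               # recompiled with scale == 0.5
```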
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_add_out, 140533119261744) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].attn.to_add_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_add_out.training, 140591004393408) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_add_out.lora_A, 140533119260352) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_add_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_add_out.lora_A['default_0'], 140533119816640) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_add_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_add_out.lora_A['default_0'].weight, 140537313609184) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_add_out.lora_B, 140533119268752) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_add_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_add_out.lora_B['default_0'], 140533119815152) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_add_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_add_out.base_layer, 140581767540944) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_add_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_add_out.lora_dropout, 140533119262416) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_add_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_add_out.lora_dropout['default_0'], 140533119265680) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_add_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.to_add_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].attn.to_add_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].attn.to_add_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.to_add_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].attn.to_add_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_add_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.to_add_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[7].attn.to_add_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_add_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
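The repeated `TENSOR_ALIASING` entries check identity, not value: `_active_adapter` on each wrapped layer must be the very same list object held by `transformer_blocks[0].norm1.linear`. PEFT appears to share one active-adapter list across layers, which lets the guard be a single `is` comparison per module. A simplified illustration (assumed structure, not the PEFT internals):

```python
# Identity guard sketch: an "is" check is O(1) and keeps holding even if the
# shared list is mutated in place, unlike an element-by-element comparison.
active_adapter = ["default_0"]       # one list shared by every wrapped layer
block0_linear_ref = active_adapter   # what transformer_blocks[0].norm1.linear holds
block7_attn_ref = active_adapter     # what transformer_blocks[7].attn.to_add_out holds

def aliasing_guard() -> bool:
    # Mirrors the "x is y" form of the TENSOR_ALIASING entries above.
    return block0_linear_ref is block7_attn_ref

assert aliasing_guard()
```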
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.norm_added_k, 140581767541088) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.norm_added_k.weight, 140581765826096) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
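The `norm_added_k` guards point at diffusers' RMS-norm forward (`normalization.py:428` and `:430`), with `eps` guarded as exactly `1e-06`. For orientation, a rough paraphrase of that computation, reconstructed only from the code fragments quoted in the guard comments (not a verbatim copy of the diffusers source):

```python
# Approximate RMS-norm per the quoted lines:
#   hidden_states = hidden_states * torch.rsqrt(variance + self.eps)   (:428)
#   if self.weight is not None: ...                                    (:430)
import torch

def rms_norm(hidden_states: torch.Tensor, weight=None, eps: float = 1e-6):
    variance = hidden_states.pow(2).mean(-1, keepdim=True)
    hidden_states = hidden_states * torch.rsqrt(variance + eps)
    if weight is not None:
        hidden_states = hidden_states * weight
    return hidden_states
```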
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.norm_added_q, 140581767540992) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.norm_added_q.weight, 140581772706816) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.processor, 140581767540224) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
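At the attention level the specialization is coarser: `attn.heads == 24` is an `EQUALS_MATCH`, and the processor object is pinned by both `TYPE_MATCH` and `ID_MATCH`. The head count feeds the reshape referenced at `attention_processor.py:1721`; a sketch with assumed FLUX-like sizes (24 heads, head_dim 128, so inner_dim 3072):

```python
# Sketch of the reshape the heads guard protects (tensor names assumed).
import torch

batch, seq, heads = 1, 16, 24            # heads guarded via EQUALS_MATCH == 24
inner_dim = 3072
head_dim = inner_dim // heads            # == 128, per attention_processor.py:1721

query = torch.randn(batch, seq, inner_dim)
query = query.view(batch, -1, heads, head_dim).transpose(1, 2)
assert query.shape == (batch, heads, seq, head_dim)
```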
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.norm, 140581767539936) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.silu, 140581767539840) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.linear, 140533118934064) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].norm1.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.linear.lora_A, 140533118936272) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.linear.lora_A['default_0'], 140533119037504) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.linear.lora_A['default_0'].weight, 140537313802368) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.linear.lora_B, 140533119038416) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.linear.lora_B['default_0'], 140533119039088) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.linear.base_layer, 140581767539888) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.linear.lora_dropout, 140533118938576) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.linear.lora_dropout['default_0'], 140533118937424) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.lora_dropout['default_0'].__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].norm1.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].norm1.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].norm1.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].norm1.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].norm1.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
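The block of guards above pins down every piece of state read by the un-merged LoRA path in peft/tuners/lora/layer.py (lines 557-568 in this trace). A minimal sketch of that forward logic, paraphrased from the cited source lines rather than copied verbatim from PEFT, shows why each attribute needs its own guard:

import torch
import torch.nn as nn

def lora_linear_forward(self, x: torch.Tensor) -> torch.Tensor:
    # result = self.base_layer(x, *args, **kwargs)      (layer.py:557)
    result = self.base_layer(x)
    for active_adapter in self.active_adapters:
        if active_adapter not in self.lora_A.keys():    # layer.py:560
            continue
        lora_A = self.lora_A[active_adapter]            # layer.py:562
        lora_B = self.lora_B[active_adapter]            # layer.py:563
        dropout = self.lora_dropout[active_adapter]     # layer.py:564
        scaling = self.scaling[active_adapter]          # layer.py:565
        x = x.to(lora_A.weight.dtype)                   # layer.py:566
        if not self.use_dora[active_adapter]:           # layer.py:568
            result = result + lora_B(lora_A(dropout(x))) * scaling
    return result

Every dict lookup in this path (lora_A, lora_B, lora_dropout, scaling, use_dora) and every module it calls becomes one of the ID_MATCH/TYPE_MATCH/DICT_LENGTH/EQUALS_MATCH guards recorded above, repeated per adapter and per wrapped Linear.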
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].norm1.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[7].norm1.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
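The TENSOR_ALIASING entry above is worth a note: PEFT stores one `_active_adapter` object that every wrapped layer references, so rather than re-guarding the value at each site, Dynamo asserts that the object reached through transformer_blocks[7] is the very same object already guarded at transformer_blocks[0]. A toy repro of the pattern (hypothetical module and variable names; any shared attribute behaves the same way):

import torch
import torch.nn as nn

shared = ["default_0"]  # one list aliased by many submodules,
                        # like PEFT's _active_adapter

class Block(nn.Module):
    def __init__(self):
        super().__init__()
        self.active = shared  # every block aliases the same list

    def forward(self, x):
        return x + len(self.active)

model = nn.Sequential(Block(), Block())
compiled = torch.compile(model)
compiled(torch.randn(4))
# Running this under TORCH_LOGS="guards" prints a TREE_GUARD_MANAGER
# dump like the one in this log; the shared attribute is checked for
# identity once rather than independently at every access site.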
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm2, accessed_by=DictGetItemGuardAccessor(norm2)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm2, 140581767541136) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm2.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context, 140581767541472) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net, 140581767541616) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[7].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0], 140581767541568) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
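Taken together, the ID_MATCH/TYPE_MATCH/LENGTH_CHECK guards on ff_context.net describe the three-element container inside diffusers' FeedForward: net[0] is a GELU projection wrapper (guarded via its .proj Linear and .approximate attribute below), net[1] is a Dropout whose only guarded state is .training, and net[2] is the output Linear, LoRA-wrapped in this trace. A structural sketch under those assumptions (the class name GELUProj and the dimensions are placeholders, not values from the log):

import torch.nn as nn
import torch.nn.functional as F

class GELUProj(nn.Module):
    # stands in for diffusers' GELU activation wrapper: a Linear
    # projection followed by F.gelu(..., approximate=self.approximate)
    def __init__(self, dim_in, dim_out, approximate="tanh"):
        super().__init__()
        self.proj = nn.Linear(dim_in, dim_out)
        self.approximate = approximate

    def forward(self, x):
        return F.gelu(self.proj(x), approximate=self.approximate)

dim, inner = 3072, 12288       # placeholder sizes
net = nn.ModuleList([
    GELUProj(dim, inner),      # net[0]: guarded via .proj and .approximate
    nn.Dropout(0.0),           # net[1]: only .training is guarded
    nn.Linear(inner, dim),     # net[2]: the LoRA-wrapped Linear here
])
assert len(net) == 3           # matches the LENGTH_CHECK above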
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].proj, 140533119001424) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].ff_context.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_A, 140533119002048) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_A['default_0'], 140533118994416) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_A['default_0'].weight, 140537313468848) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_B, 140533119004592) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_B['default_0'], 140533119002288) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].proj.base_layer, 140581767541664) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_dropout, 140533119001760) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_dropout['default_0'], 140533118992976) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].ff_context.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].ff_context.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].ff_context.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].ff_context.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].ff_context.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].ff_context.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[7].ff_context.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
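The EQUALS_MATCH on approximate == 'tanh' specializes the compiled graph to the tanh form of GELU that diffusers' activations.py selects via F.gelu(gate, approximate=self.approximate). For reference, the two branches that this guard distinguishes:

import math
import torch
import torch.nn.functional as F

x = torch.randn(8)

exact = F.gelu(x)                      # 0.5 * x * (1 + erf(x / sqrt(2)))
tanh_ = F.gelu(x, approximate="tanh")  # the branch this trace specializes on

# the tanh approximation written out explicitly:
manual = 0.5 * x * (1.0 + torch.tanh(math.sqrt(2.0 / math.pi) * (x + 0.044715 * x**3)))
torch.testing.assert_close(tanh_, manual)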
GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[1], 140581767541760) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[2], 140533119006512) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].ff_context.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[2].training, 140591004393408) # for 
module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[2].lora_A, 140533119008624) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[2].lora_A['default_0'], 140533119005072) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[2].lora_A['default_0'].weight, 140537313469488) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[2].lora_B, 140533119008528) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[2].lora_B['default_0'], 140533119005744) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[2].base_layer, 140581767541808) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[2].lora_dropout, 140533118999984) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[2].lora_dropout['default_0'], 140533119006272) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].ff_context.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].ff_context.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].ff_context.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].ff_context.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].ff_context.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].ff_context.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[7].ff_context.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
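The net[2] subtree above is the full guard footprint of one PEFT LoRA Linear. The inline comments quote peft/tuners/lora/layer.py:557-568; stitched together they outline roughly the forward below (the final accumulation line is reconstructed from PEFT's public implementation, not quoted in this log):

    # Sketch of the LoRA forward these guards protect; names follow the
    # quoted peft/tuners/lora/layer.py lines, last line is an assumption.
    def forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)      # layer.py:557
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():  # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]          # layer.py:562
            lora_B = self.lora_B[active_adapter]          # layer.py:563
            dropout = self.lora_dropout[active_adapter]   # layer.py:564
            scaling = self.scaling[active_adapter]        # layer.py:565
            x = x.to(lora_A.weight.dtype)                 # layer.py:566
            if not self.use_dora[active_adapter]:         # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Note the EQUALS_MATCH baking scaling['default_0'] == 1.0 into the compiled graph: PEFT computes scaling = lora_alpha / r, so this adapter was presumably configured with lora_alpha equal to its rank.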
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context, 140581767539984) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.norm, 140581767540176) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.silu, 140581767540080) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.linear, 140533119039328) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].norm1_context.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
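The DICT_CONTAINS guards that recur under each module come from nn.Module._call_impl (module.py:1556): the compiled code dispatches to the class-level forward, so Dynamo must verify that no per-instance forward has been monkey-patched onto the object. Informally:

    # Illustrative only: what each DICT_CONTAINS guard asserts for a module.
    assert 'forward' not in module.__dict__  # else _call_impl would dispatch differently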
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.linear.lora_A, 140533119038848) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.linear.lora_A['default_0'], 140533119035536) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.linear.lora_A['default_0'].weight, 140537313795728) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.linear.lora_B, 140533119037360) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.linear.lora_B['default_0'], 140533119035200) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.linear.base_layer, 140581767540128) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.linear.lora_dropout, 140533119038944) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.linear.lora_dropout['default_0'], 140533119038608) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].norm1_context.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].norm1_context.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].norm1_context.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].norm1_context.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].norm1_context.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].norm1_context.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[7].norm1_context.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
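Despite the TENSOR_ALIASING label, what is being guarded on _active_adapter is plain object identity: every LoRA layer shares one active-adapter object, checked once against transformer_blocks[0].norm1.linear rather than independently per layer. In effect:

    # Illustrative: one is-identity check per aliased attribute;
    # "model" is a hypothetical handle standing in for L['self'].
    blocks = model.transformer_blocks
    assert (blocks[0].norm1.linear._active_adapter
            is blocks[7].norm1_context.linear._active_adapter)

A later set_adapter() call that rebinds this shared state would therefore be caught here for all layers at once.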
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm2_context, 140581767541184) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
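The norm1_context subtree traces diffusers' adaptive-norm path for the context stream. From the quoted normalization.py lines (the 6-way chunk is inferred from the five values unpacked at transformer_flux.py:167, not quoted in this log):

    # Sketch of the guarded AdaLayerNormZero-style forward; chunk layout
    # is an assumption based on the unpack at transformer_flux.py:167.
    emb = self.linear(self.silu(emb))                                  # normalization.py:137
    shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = emb.chunk(6, dim=1)
    x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]   # normalization.py:139

norm2_context, just above, contributes only an ID_MATCH plus a training check: a plain norm layer with no submodules of its own to guard.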
| | | | | +- GuardManager: source=L['self'].transformer_blocks[8], accessed_by=GetItemGuardAccessor(8)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8], 140581767539456) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff, accessed_by=DictGetItemGuardAccessor(ff)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff, 140581767543104) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net, 140581767543344) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[8].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0], 140581767543296) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
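The ID_MATCH/TYPE_MATCH/LENGTH_CHECK triple above pins ff.net as the same 3-element ModuleList and feeds the loop quoted from attention.py:1200. The slot layout implied by the rest of the dump (net[0] is the GELU-with-projection guarded next; net[2] is a LoRA-wrapped Linear, as already seen for block 7) suggests roughly:

    # Sketch of the guarded FeedForward loop; a dropout in slot 1 is an
    # assumption from diffusers' usual layout, not spelled out in the log.
    def forward(self, hidden_states):
        for module in self.net:            # attention.py:1200
            hidden_states = module(hidden_states)
        return hidden_states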
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].proj, 140533121925376) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].ff.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].proj.lora_A, 140533121925184) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].proj.lora_A['default_0'], 140533121936224) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].proj.lora_A['default_0'].weight, 140537313186480) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].proj.lora_B, 140533121933872) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].proj.lora_B['default_0'], 140533121938864) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].proj.base_layer, 140581767543392) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].proj.lora_dropout, 140533121925472) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
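Each LoRA-wrapped Linear repeats this same multi-guard pattern (lora_A/lora_B dicts and weights, dropout, scaling, use_dora, merge state, hook dicts), so the tree grows linearly with the number of adapted layers and dominates guard-evaluation time. If the adapter no longer needs to stay swappable, one option is to fold it into the base weights before compiling; a sketch, assuming a diffusers pipeline handle named pipe and with fuse/unload semantics to be checked against the installed diffusers/PEFT versions:

    pipe.fuse_lora()  # merge the LoRA deltas into the base weights
    pipe.transformer = torch.compile(pipe.transformer)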
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].proj.lora_dropout['default_0'], 140533121925424) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].ff.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].ff.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].ff.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].ff.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].ff.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].ff.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[8].ff.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[1], 140581767543440) # for module in self.net: #
diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[2], 140533121932336) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].ff.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[2].lora_A, 140533121936800) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | 
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[2].lora_A['default_0'], 140533119492608) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[2].lora_A['default_0'].weight, 140537313194400) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[2].lora_B, 140533121928160) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[2].lora_B['default_0'], 140533119493472) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[2].base_layer, 140581767543488) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[2].lora_dropout, 140533121940832) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[2].lora_dropout['default_0'], 140533121931136) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].ff.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].ff.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].scaling['default_0'], 
accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].ff.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].ff.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].ff.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].ff.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[8].ff.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # 
peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn, 140581767542336) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_k, 140533119215952) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_k.lora_A, 140533119207648) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[8].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_k.lora_A['default_0'], 140533117799776) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_k.lora_A['default_0'].weight, 140537313328032) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_k.lora_B, 140533119209808) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_k.lora_B['default_0'], 140533117803856) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_k.base_layer, 140581767542480) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[8].attn.to_k.lora_dropout, 140533119217824) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_k.lora_dropout['default_0'], 140533119211728) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[8].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | 
| | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_q, 140533119403968) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_q.lora_A, 140533119408864) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_q.lora_A['default_0'], 140533119408816) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_q.lora_A['default_0'].weight, 140537313313712) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | 
| | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_q.lora_B, 140533119404256) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_q.lora_B['default_0'], 140533119408192) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_q.base_layer, 140581767542576) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_q.lora_dropout, 140533119407712) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_q.lora_dropout['default_0'], 140533119408912) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[8].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
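[Annotation] The dense run of ID_MATCH/TYPE_MATCH/DICT_LENGTH guards above all belong to one LoRA-wrapped Linear (attn.to_q): Dynamo pins every attribute that peft/tuners/lora/layer.py touches between lines 557 and 568. A minimal, self-contained sketch of that control flow (not PEFT's actual class; the attribute names simply mirror the guard sources above):

    import torch
    import torch.nn as nn

    class LoRALinearSketch(nn.Module):
        # Every self.* lookup in forward() becomes a GuardManager node above,
        # and every branch on a plain Python value (use_dora, merged_adapters,
        # scaling) becomes a concrete value guard.
        def __init__(self, base: nn.Linear, r: int = 16, lora_alpha: int = 16,
                     adapter: str = "default_0"):
            super().__init__()
            self.base_layer = base
            self.lora_A = nn.ModuleDict({adapter: nn.Linear(base.in_features, r, bias=False)})
            self.lora_B = nn.ModuleDict({adapter: nn.Linear(r, base.out_features, bias=False)})
            self.lora_dropout = nn.ModuleDict({adapter: nn.Identity()})
            self.scaling = {adapter: lora_alpha / r}   # EQUALS_MATCH: == 1.0 here
            self.use_dora = {adapter: False}           # ID_MATCH against False
            self.active_adapters = [adapter]
            self.merged_adapters = []                  # LENGTH_CHECK: unmerged

        def forward(self, x):
            result = self.base_layer(x)                            # layer.py:557
            for active_adapter in self.active_adapters:
                if active_adapter not in self.lora_A.keys():       # layer.py:560
                    continue
                lora_A = self.lora_A[active_adapter]               # layer.py:562
                lora_B = self.lora_B[active_adapter]               # layer.py:563
                dropout = self.lora_dropout[active_adapter]        # layer.py:564
                scaling = self.scaling[active_adapter]             # layer.py:565
                x = x.to(lora_A.weight.dtype)                      # layer.py:566
                if not self.use_dora[active_adapter]:              # layer.py:568
                    result = result + lora_B(lora_A(dropout(x))) * scaling
            return result

Because scaling and use_dora are plain dicts of Python floats/bools rather than tensors, Dynamo can only guard them by value, which is why the compiled graph specializes on them.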
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_v, 140533117797376) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_v.lora_A, 140533117809904) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_v.lora_A['default_0'], 140533117798528) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_v.lora_A['default_0'].weight, 140537313314112) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_v.lora_B, 140533117797568) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_v.lora_B['default_0'], 140533117810576) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_v.base_layer, 140581767542672) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_v.lora_dropout, 140533117798816) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_v.lora_dropout['default_0'], 140533117798960) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[8].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
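[Annotation] Note the TENSOR_ALIASING guards: every LoRA layer's _active_adapter resolves to the very same Python object as transformer_blocks[0].norm1.linear._active_adapter, so Dynamo asserts identity once per layer instead of re-checking the value. A hypothetical post-hoc sanity check for the invariants these guards encode (`model` is assumed to be the adapter-wrapped transformer; attribute names are the ones appearing in the guards):

    # Hypothetical check, not part of PEFT or diffusers.
    ref = model.transformer_blocks[0].norm1.linear._active_adapter
    for name, module in model.named_modules():
        if hasattr(module, "_active_adapter"):
            assert module._active_adapter is ref        # TENSOR_ALIASING
        if hasattr(module, "merged_adapters"):
            assert not module.merged_adapters           # LENGTH_CHECK (unmerged)
        if hasattr(module, "_disable_adapters"):
            assert module._disable_adapters is False    # ID_MATCH against False

If any of these drift (e.g. after merging adapters), the guard set above fails and the next forward pass recompiles.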
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.norm_k, 140581767542528) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.norm_k.weight, 140581774129120) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.norm_q, 140581767542432) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.norm_q.weight, 140581772718176) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
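[Annotation] norm_q and norm_k are the query/key RMSNorms; the guards bake eps in as a constant (EQUALS_MATCH == 1e-06) and pin the elementwise weight by object identity because the forward branches on `self.weight is not None`. A simplified sketch of the normalization the guard comments quote from diffusers/src/diffusers/models/normalization.py (dtype handling omitted):

    import torch

    def rms_norm_sketch(hidden_states, weight=None, eps=1e-6):
        # eps is specialized into the compiled graph; `weight` drives an
        # `is not None` branch, hence the ID_MATCH guard on it above.
        variance = hidden_states.pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + eps)  # normalization.py:428
        if weight is not None:                                       # normalization.py:430
            hidden_states = hidden_states * weight
        return hidden_states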
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out, 140581767542864) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[0], 140533119533744) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].attn.to_out[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[0].training, 140591004393408) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[0].lora_A, 140533119534032) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[0].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[0].lora_A['default_0'], 140533119544832) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[0].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[0].lora_A['default_0'].weight, 140537313313312) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[0].lora_B, 140533119536432) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[0].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[0].lora_B['default_0'], 140533119538016) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[0].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[0].base_layer, 140581767542912) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[0].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[0].lora_dropout, 140533119533792) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[0].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[0].lora_dropout['default_0'], 140533119534848) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[0].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.to_out[0].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].attn.to_out[0].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].attn.to_out[0].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.to_out[0].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].attn.to_out[0].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[0].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.to_out[0].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[8].attn.to_out[0].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[0]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[1], 140581767542960) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
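[Annotation] The repeated EQUALS_MATCH guards on scaling['default_0'] == 1.0 specialize the compiled graph on the adapter strength: PEFT stores scaling = lora_alpha / r, so this checkpoint was trained with lora_alpha == r. Changing the strength after compilation therefore fails these guards and forces a recompile on the next call. Hypothetical illustration (`pipe` and `prompt` are assumed; set_adapters is the diffusers adapter-weighting API):

    # First call at the guarded strength: guard set matches, compiled graph is reused.
    pipe.set_adapters(["default_0"], adapter_weights=[1.0])
    image = pipe(prompt).images[0]

    # The scaling dict no longer contains 1.0 -> EQUALS_MATCH fails -> recompile.
    pipe.set_adapters(["default_0"], adapter_weights=[0.5])
    image = pipe(prompt).images[0]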
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj, 140533120188672) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].attn.add_k_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj.training, 140591004393408) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj.lora_A, 140533118736208) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj.lora_A['default_0'], 140533118735344) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj.lora_A['default_0'].weight, 140537313327232) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj.lora_B, 140533118736544) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj.lora_B['default_0'], 140533118733808) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj.base_layer, 140581767542720) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj.lora_dropout, 140533118733040) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj.lora_dropout['default_0'], 140533118733328) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.add_k_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].attn.add_k_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].attn.add_k_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | |
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.add_k_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].attn.add_k_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.add_k_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[8].attn.add_k_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] 
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_q_proj, 140533117797136) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].attn.add_q_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_q_proj.training, 140591004393408) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
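Every add_k_proj guard above maps onto one line of the multi-adapter branch in peft's LoRA Linear.forward (peft/tuners/lora/layer.py:557-568, per the source comments in the guard tree). The following is a minimal runnable sketch of that control flow, not the real peft class; the adapter key 'default_0' and scaling value 1.0 are taken from the guards above, everything else (class name, rank, sizes) is a simplified assumption:

import torch
import torch.nn as nn

class LoraLinearSketch(nn.Module):
    """Simplified stand-in for a peft LoRA-wrapped Linear, mirroring layer.py:557-568."""

    def __init__(self, base: nn.Linear, r: int = 4, scale: float = 1.0):
        super().__init__()
        self.base_layer = base                    # guarded above via ID_MATCH on base_layer
        self.lora_A = nn.ModuleDict({"default_0": nn.Linear(base.in_features, r, bias=False)})
        self.lora_B = nn.ModuleDict({"default_0": nn.Linear(r, base.out_features, bias=False)})
        self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})
        self.scaling = {"default_0": scale}       # EQUALS_MATCH pins scaling['default_0'] == 1.0
        self.use_dora = {"default_0": False}      # ID_MATCH pins use_dora['default_0'] to False
        self.merged_adapters = []                 # LENGTH_CHECK: not merged_adapters
        self._disable_adapters = False            # ID_MATCH on the bool singleton
        self._active_adapter = ["default_0"]      # TENSOR_ALIASING: shared across all layers

    def forward(self, x):
        result = self.base_layer(x)               # layer.py:557
        for active_adapter in self._active_adapter:
            if active_adapter not in self.lora_A:  # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]   # layer.py:562
            lora_B = self.lora_B[active_adapter]   # layer.py:563
            dropout = self.lora_dropout[active_adapter]  # layer.py:564
            scaling = self.scaling[active_adapter]       # layer.py:565
            x = x.to(lora_A.weight.dtype)                # layer.py:566
            if not self.use_dora[active_adapter]:        # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

layer = LoraLinearSketch(nn.Linear(64, 64))
out = layer(torch.randn(2, 64))

Each attribute the sketch touches (lora_A, lora_B, lora_dropout, scaling, use_dora, merged_adapters, _disable_adapters, _active_adapter) appears above as its own GuardManager node, which is why a single LoRA-wrapped projection contributes dozens of guards.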
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_q_proj, 140533117797136) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].attn.add_q_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_q_proj.training, 140591004393408) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_q_proj.lora_A, 140533119542864) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_q_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_q_proj.lora_A['default_0'], 140533119538304) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_q_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_q_proj.lora_A['default_0'].weight, 140537313321072) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_q_proj.lora_B, 140533119542912) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_q_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_q_proj.lora_B['default_0'], 140533119546224) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_q_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_q_proj.base_layer, 140581767542816) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_q_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_q_proj.lora_dropout, 140533119542192) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_q_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_q_proj.lora_dropout['default_0'], 140533119539312) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_q_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.add_q_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].attn.add_q_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].attn.add_q_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.add_q_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].attn.add_q_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_q_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.add_q_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[8].attn.add_q_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_q_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
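The same per-projection guard pattern now repeats for add_v_proj and to_add_out below; only the object ids change. To regenerate a TREE_GUARD_MANAGER dump like this one for any compiled function, the guards logging channel can be enabled. A minimal sketch (PyTorch 2.x; the exact records printed vary by version and model):

import torch

# Equivalent to running the process with TORCH_LOGS="guards" in the environment.
torch._logging.set_logs(guards=True)

@torch.compile
def f(x):
    return x * 2

f(torch.randn(8))  # the guard tree is printed once the first compilation finishes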
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_v_proj, 140533118730880) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].attn.add_v_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_v_proj.training, 140591004393408) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_v_proj.lora_A, 140533118730640) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_v_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_v_proj.lora_A['default_0'], 140533118746528) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_v_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_v_proj.lora_A['default_0'].weight, 140537313317792) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_v_proj.lora_B, 140533118739136) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_v_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_v_proj.lora_B['default_0'], 140533118739376) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_v_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_v_proj.base_layer, 140581767542768) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_v_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_v_proj.lora_dropout, 140533118734096) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_v_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_v_proj.lora_dropout['default_0'], 140533118731072) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_v_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.add_v_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].attn.add_v_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].attn.add_v_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.add_v_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].attn.add_v_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_v_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.add_v_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[8].attn.add_v_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_v_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
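Note that almost every node above is an ID_MATCH on a concrete Python object, so the compiled graph stays reusable only while the exact same module objects remain in place; loading, fusing, or swapping a LoRA adapter replaces those objects and fails the guards. A toy illustration of that behavior (hypothetical two-layer module, not the Flux transformer):

import torch
import torch.nn as nn

model = nn.Sequential(nn.Linear(4, 4))
compiled = torch.compile(model)
compiled(torch.randn(2, 4))   # first call: trace, compile, install ID_MATCH guards

model[0] = nn.Linear(4, 4)    # same shape and dtype, but a new object id
compiled(torch.randn(2, 4))   # guard miss -> silent recompile on the second call
# Running with TORCH_LOGS="recompiles" prints which guard failed and why.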
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_add_out, 140533119546800) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].attn.to_add_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_add_out.training, 140591004393408) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_add_out.lora_A, 140533119536192) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_add_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_add_out.lora_A['default_0'], 140533119535040) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_add_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_add_out.lora_A['default_0'].weight, 140537313188560) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_add_out.lora_B, 140533119535280) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_add_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_add_out.lora_B['default_0'], 140533119533840) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_add_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_add_out.base_layer, 140581767543008) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_add_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_add_out.lora_dropout, 140533119540320) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_add_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_add_out.lora_dropout['default_0'], 140533119540656) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_add_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.to_add_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].attn.to_add_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].attn.to_add_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | |
| +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.to_add_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].attn.to_add_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_add_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.to_add_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[8].attn.to_add_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_add_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
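Two details in the block above are worth flagging. First, the EQUALS_MATCH on scaling['default_0'] == 1.0 bakes the adapter scale into the compiled graph as a constant, so running the model with a different lora_scale would fail this guard and force a recompile. Second, the merged_adapters, _disable_adapters and _active_adapter checks come from the BaseTunerLayer properties the comments cite (peft/tuners/tuners_utils.py:506, 511, 516); a rough paraphrase, not the verbatim PEFT source:

    # Paraphrase of the BaseTunerLayer properties quoted in the guard comments
    # above (peft/tuners/tuners_utils.py:506, 511, 516).
    @property
    def merged(self) -> bool:
        return bool(self.merged_adapters)   # guarded via TYPE_MATCH + LENGTH_CHECK

    @property
    def disable_adapters(self) -> bool:
        return self._disable_adapters       # guarded via ID_MATCH against False

    @property
    def active_adapter(self):
        return self._active_adapter         # guarded via TENSOR_ALIASING across modules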
[__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.norm_added_k, 140581767543152) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[8].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.norm_added_k.weight, 140581772720576) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.norm_added_q, 140581767543056) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: 
L['self'].transformer_blocks[8].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.norm_added_q.weight, 140581773245744) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.processor, 140581767542288) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: 
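The norm_added_q / norm_added_k managers above guard the diffusers RMSNorm path the comments quote (normalization.py:428-430), down to an EQUALS_MATCH on eps == 1e-06, and the EQUALS_MATCH on attn.heads == 24 freezes head_dim = inner_dim // attn.heads (attention_processor.py:1721) into the graph. A sketch of the guarded normalization step follows; only the rsqrt line and the weight check are quoted above, while the variance and weight-multiply lines are assumptions based on a standard RMSNorm:

    # Sketch of the RMSNorm forward cited in the guards
    # (diffusers/src/diffusers/models/normalization.py:428-430).
    import torch

    def rms_norm(hidden_states, weight, eps=1e-6):
        # assumed: standard RMS variance over the last dimension
        variance = hidden_states.pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + eps)  # :428
        if weight is not None:                                       # :430
            hidden_states = hidden_states * weight                   # assumed
        return hidden_states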
source=L['self'].transformer_blocks[8].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1, accessed_by=DictGetItemGuardAccessor(norm1) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1, 140581767541856) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.norm, 140581767542000) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.silu, 140581767541904) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.linear, 140533119006944) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].norm1.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 
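transformer_blocks[8].norm1 is the AdaLayerNormZero whose forward the comments quote (normalization.py:135-139): a SiLU-plus-Linear projection of the conditioning embedding followed by a modulated LayerNorm. A sketch paraphrasing those lines; the chunk(6) is an assumption inferred from the five values unpacked at transformer_flux.py:165, not a quoted source line:

    # Paraphrase of AdaLayerNormZero.forward as cited in the guard comments
    # (diffusers/src/diffusers/models/normalization.py:135-139).
    def forward(self, x, emb=None):
        emb = self.linear(self.silu(emb))                                 # :137
        shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = emb.chunk(6, dim=1)  # assumed
        x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # :139
        return x, gate_msa, shift_mlp, scale_mlp, gate_mlp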
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.linear.lora_A, 140533119186256) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.linear.lora_A['default_0'], 140533119179440) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.linear.lora_A['default_0'].weight, 140537313473168) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.linear.lora_B, 140533119175936) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.linear.lora_B['default_0'], 140533119187696) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.linear.base_layer, 140581767541952) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.linear.lora_dropout, 140533119173152) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.linear.lora_dropout['default_0'], 140533119176416) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.lora_dropout['default_0'].__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].norm1.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].norm1.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].norm1.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].norm1.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].norm1.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[8].norm1.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].norm1.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[8].norm1.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | 
+- GuardManager: source=L['self'].transformer_blocks[8].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm2, accessed_by=DictGetItemGuardAccessor(norm2) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm2, 140581767543200) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm2.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context, 140581767543536) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # 
diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net, 140581767543680) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[8].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0], 140581767543632) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].proj, 140533119493904) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].ff_context.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_A, 140533119499760) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: 
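The ff_context managers walk a three-module FeedForward (the LENGTH_CHECK asserts len(net) == 3; in diffusers this is typically a GELU projection, a Dropout and an output Linear) and then descend into net[0].proj, which is itself LoRA-wrapped and so repeats the whole lora_A/lora_B guard pattern seen earlier. A paraphrase of the two forward lines cited above; the gelu call after the projection is an assumption about what follows activations.py:88:

    # Paraphrase of the FeedForward loop (attention.py:1200) and the GELU
    # projection (activations.py:88) cited in the guard comments.
    import torch

    def feed_forward(self, hidden_states):
        for module in self.net:                   # attention.py:1200
            hidden_states = module(hidden_states)
        return hidden_states

    def gelu_forward(self, hidden_states):
        hidden_states = self.proj(hidden_states)  # activations.py:88
        return torch.nn.functional.gelu(hidden_states)  # assumed activation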
source=L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_A['default_0'], 140533119494288) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_A['default_0'].weight, 140537313196320) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_B, 140533119495632) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_B['default_0'], 140533119490208) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].proj.base_layer, 140581767543728) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_dropout, 140533119496592) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_dropout['default_0'], 140533119492272) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].ff_context.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].ff_context.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].ff_context.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].ff_context.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].ff_context.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].ff_context.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[8].ff_context.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[1], 140581767543824) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[2], 140533118743024) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].ff_context.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[2].lora_A, 140533118285184) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[2].lora_A['default_0'], 140533118286720) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[2].lora_A['default_0'].weight, 140537313188160) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[2].lora_B, 140533118287824) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[2].lora_B['default_0'], 140533118287536) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[2].base_layer, 140581767543872) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[2].lora_dropout, 140533118283408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[2].lora_dropout['default_0'], 140533118287008) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].ff_context.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].ff_context.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].ff_context.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].ff_context.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].ff_context.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].ff_context.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[8].ff_context.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
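This roughly forty-guard pattern repeats for every LoRA-wrapped Linear in the model (only the object ids change), which is what makes the dump this long. The `_active_adapter` checks are TENSOR_ALIASING guards: they assert the attribute is literally the same Python object as `transformer_blocks[0].norm1.linear._active_adapter`, i.e. one adapter list shared by all adapted modules. The float read from `scaling` is specialized the same way in any compiled module; here is a self-contained toy (an assumption for illustration, not the FLUX model) showing that mutating such a guarded value invalidates the cached graph:

    # Toy module: Dynamo specializes on the float it reads from a plain dict
    # attribute and installs an EQUALS_MATCH guard on it, exactly like
    # scaling['default_0'] above.
    import torch

    class Scaled(torch.nn.Module):
        def __init__(self):
            super().__init__()
            self.lin = torch.nn.Linear(8, 8)
            self.scaling = {"default_0": 1.0}   # plain dict, like PEFT's scaling

        def forward(self, x):
            return self.lin(x) * self.scaling["default_0"]

    m = Scaled()
    cm = torch.compile(m)
    x = torch.randn(2, 8)
    cm(x)                         # first call: compile and install the guard tree
    m.scaling["default_0"] = 0.5  # EQUALS_MATCH ... == 1.0 now fails
    cm(x)                         # guard miss -> recompile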
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context, 140581767542048) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.norm, 140581767542240) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.silu, 140581767542144) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.linear, 140533119184960) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].norm1_context.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.linear.lora_A, 140533119185392) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.linear.lora_A['default_0'], 140533119406848) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.linear.lora_A['default_0'].weight, 140537313461088) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.linear.lora_B, 140533119414048) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.linear.lora_B['default_0'], 140533119406704) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.linear.base_layer, 140581767542192) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.linear.lora_dropout, 140533119188032) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.linear.lora_dropout['default_0'], 140533119179584) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].norm1_context.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].norm1_context.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].norm1_context.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].norm1_context.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].norm1_context.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].norm1_context.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[8].norm1_context.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm2_context, 140581767543248) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
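That closes the guard subtree for `transformer_blocks[8]`; the same structure now repeats for block 9 and the remaining blocks. Dumps like this come from the `guards` logging artifact; assuming the stock TORCH_LOGS machinery of the PyTorch build that produced this trace, either route below should reproduce them (`run_flux.py` is a hypothetical stand-in for the actual script):

    # Reproducing a TREE_GUARD_MANAGER dump (assumed usage):
    #   TORCH_LOGS="guards" python run_flux.py
    # or programmatically, before the first compiled call:
    import torch
    torch._logging.set_logs(guards=True)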
| | | | | +- GuardManager: source=L['self'].transformer_blocks[9], accessed_by=GetItemGuardAccessor(9)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9], 140581767541520) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff, accessed_by=DictGetItemGuardAccessor(ff)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff, 140581769888144) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net, 140581769888384) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[9].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0], 140581769888336) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
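The guards above pin ff.net as a fixed, three-element container iterated by diffusers' FeedForward. A sketch of the module that the LENGTH_CHECK and the quoted loop describe, with the slot layout inferred from the guards that follow (net[0] a tanh-GELU projection, net[1] guarded only through .training and presumably a Dropout, net[2] the LoRA-wrapped output Linear):

```python
import torch
from torch import nn

# Assumed layout of transformer_blocks[9].ff, reconstructed from the guards.
class FeedForward(nn.Module):
    def __init__(self, net: nn.ModuleList):
        super().__init__()
        self.net = net  # LENGTH_CHECK above pins len(self.net) == 3

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        for module in self.net:  # diffusers/src/diffusers/models/attention.py:1200 in forward
            hidden_states = module(hidden_states)
        return hidden_states
```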
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].proj, 140533118775456) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].ff.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].proj.lora_A, 140533118665728) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].proj.lora_A['default_0'], 140533118667696) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].proj.lora_A['default_0'].weight, 140537315005024) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].proj.lora_B, 140533118667936) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].proj.lora_B['default_0'], 140533118667600) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].proj.base_layer, 140581769888432) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].proj.lora_dropout, 140533118668128) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].proj.lora_dropout['default_0'], 140533118667552) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].ff.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].ff.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
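The source comments between peft/tuners/lora/layer.py:557 and :568 quote successive lines of the lora.Linear.forward that dynamo inlined here. Stitched back together they give roughly the following (quoted lines keep their layer.py line numbers; the loop header and the final update are assumed glue, not confirmed by this log):

```python
# peft lora.Linear.forward, reconstructed from the frames quoted in the guards.
def lora_linear_forward(self, x, *args, **kwargs):
    result = self.base_layer(x, *args, **kwargs)      # layer.py:557
    for active_adapter in self.active_adapters:       # assumed loop header
        if active_adapter not in self.lora_A.keys():  # layer.py:560
            continue
        lora_A = self.lora_A[active_adapter]          # layer.py:562
        lora_B = self.lora_B[active_adapter]          # layer.py:563
        dropout = self.lora_dropout[active_adapter]   # layer.py:564
        scaling = self.scaling[active_adapter]        # layer.py:565
        x = x.to(lora_A.weight.dtype)                 # layer.py:566
        if not self.use_dora[active_adapter]:         # layer.py:568
            result = result + lora_B(lora_A(dropout(x))) * scaling  # assumed update
    return result
```

Each attribute and dict lookup in that body becomes one GuardManager node, and each value read becomes an ID/TYPE/EQUALS/LENGTH check, which is why a single 'default_0' adapter contributes a dozen-odd guards per wrapped linear.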
| | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].ff.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].ff.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].ff.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].ff.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[9].ff.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[1], 140581769888480) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
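net[0] closed with two value guards that pin the activation exactly: .approximate must stay equal to 'tanh' and .proj must remain the same lora-wrapped object. The two quoted activations.py frames suggest a module of roughly this shape (a sketch, not diffusers' actual class; the final return line is assumed):

```python
import torch
import torch.nn.functional as F
from torch import nn

# Rough shape of ff.net[0] implied by the quoted activations.py frames and the
# EQUALS_MATCH on .approximate.
class GELU(nn.Module):
    def __init__(self, proj: nn.Module, approximate: str = "tanh"):
        super().__init__()
        self.proj = proj                 # lora-wrapped Linear, guarded by ID_MATCH
        self.approximate = approximate   # EQUALS_MATCH above: must stay 'tanh'

    def gelu(self, gate: torch.Tensor) -> torch.Tensor:
        return F.gelu(gate, approximate=self.approximate)  # activations.py:83 in gelu

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        hidden_states = self.proj(hidden_states)           # activations.py:88 in forward
        return self.gelu(hidden_states)                    # assumed
```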
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[2], 140533118680272) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].ff.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[2].lora_A, 140533118677872) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[2].lora_A['default_0'], 140533118667216) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[2].lora_A['default_0'].weight, 140537315001264) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[2].lora_B, 140533118676768) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[2].lora_B['default_0'], 140533118667072) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[2].base_layer, 140581769888528) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[2].lora_dropout, 140533118675760) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[2].lora_dropout['default_0'], 140533118679168) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].ff.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].ff.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].ff.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].ff.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].ff.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].ff.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[9].ff.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
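With block 9's feed-forward subtree closed, the dump moves on to its attention module, where the same LoRA guard pattern repeats for each projection. Only the key projection's frame is quoted before this excerpt truncates; the sibling projections in the sketch below are assumptions about the neighboring processor lines, not confirmed by this log:

```python
def qkv_projections(attn, hidden_states):
    # Only the to_k call is quoted in the guards below; to_q / to_v are assumed.
    query = attn.to_q(hidden_states)  # assumed neighboring line
    key = attn.to_k(hidden_states)    # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
    value = attn.to_v(hidden_states)  # assumed neighboring line
    return query, key, value
```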
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn, 140581767544400) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_k, 140533118285856) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_k.lora_A, 140533118225184) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_k.lora_A['default_0'], 140533118228064) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_k.lora_A['default_0'].weight, 140537315118752) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_k.lora_B, 140533118238288) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_k.lora_B['default_0'], 140533118222496) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_k.base_layer, 140581767544544) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_k.lora_dropout, 140533118232336) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_k.lora_dropout['default_0'], 140533118238000) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
[__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[9].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | 
| | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_q, 140533118284176) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_q.lora_A, 140533118283984) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_q.lora_A['default_0'], 140533118276208) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_q.lora_A['default_0'].weight, 140537315121392) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | 
| | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_q.lora_B, 140533118280048) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_q.lora_B['default_0'], 140533118276496) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_q.base_layer, 140581767544640) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[9].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_q.lora_dropout, 140533118284368) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_q.lora_dropout['default_0'], 140533118284272) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[9].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | 
| | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_v, 140533118236416) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_v.lora_A, 140533118235888) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_v.lora_A['default_0'], 140533118232240) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | 
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_v.lora_A['default_0'].weight, 140537315127792) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_v.lora_B, 140533118224512) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_v.lora_B['default_0'], 140533118232192) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_v.base_layer, 140581767544736) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_v.lora_dropout, 140533118234160) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_v.lora_dropout['default_0'], 140533118224464) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- 
TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[9].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.norm_k, 140581767544592) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.norm_k.weight, 140581765826736) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.norm_q, 140581767544496) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[9].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.norm_q.weight, 140581772717536) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out, 
accessed_by=DictGetItemGuardAccessor(to_out) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out, 140581769887904) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[0], 140533118606528) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].attn.to_out[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[0].training, 140591004393408) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[0].lora_A, 
140533118609888) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[0].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[0].lora_A['default_0'], 140533118769840) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[0].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[0].lora_A['default_0'].weight, 140537315006944) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[0].lora_B, 140533118609168) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[0].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[0].lora_B['default_0'], 140533118769552) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[0].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[0].base_layer, 140581769887952) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[0].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[0].lora_dropout, 140533118612864) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[0].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[0].lora_dropout['default_0'], 140533118614880) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[0].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.to_out[0].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].attn.to_out[0].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].attn.to_out[0].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.to_out[0].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].attn.to_out[0].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[0].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.to_out[0].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[9].attn.to_out[0].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[0]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[1], 140581769888000) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
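
The source comments attached to the guards above keep pointing at the same dozen lines of peft/tuners/lora/layer.py (557-568). Below is a minimal, illustrative sketch of that forward path, assuming a simplified single-adapter Linear layer: the class name LoraLinearSketch and its constructor are hypothetical, and only the attribute names and the quoted line numbers come from the log. It shows why every attribute the forward touches (lora_A, lora_B, lora_dropout, scaling, use_dora, plus the merged/disabled bookkeeping) reappears as a guard on each wrapped projection.

import torch
import torch.nn as nn

class LoraLinearSketch(nn.Module):
    # Hypothetical, simplified stand-in for peft's LoRA Linear; attribute
    # names mirror the guard sources above.
    def __init__(self, base_layer: nn.Linear, r: int = 8, scaling: float = 1.0):
        super().__init__()
        self.base_layer = base_layer
        self.lora_A = nn.ModuleDict({"default_0": nn.Linear(base_layer.in_features, r, bias=False)})
        self.lora_B = nn.ModuleDict({"default_0": nn.Linear(r, base_layer.out_features, bias=False)})
        self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})
        self.scaling = {"default_0": scaling}   # plain dict -> TYPE_MATCH, DICT_LENGTH, EQUALS_MATCH guards
        self.use_dora = {"default_0": False}    # plain dict -> TYPE_MATCH, DICT_LENGTH, ID_MATCH guards
        self.active_adapters = ["default_0"]
        self.merged_adapters = []               # empty list -> LENGTH_CHECK guard
        self._disable_adapters = False          # bool -> ID_MATCH guard

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        result = self.base_layer(x)                       # layer.py:557
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():  # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]          # layer.py:562
            lora_B = self.lora_B[active_adapter]          # layer.py:563
            dropout = self.lora_dropout[active_adapter]   # layer.py:564
            scaling = self.scaling[active_adapter]        # layer.py:565
            x = x.to(lora_A.weight.dtype)                 # layer.py:566
            if not self.use_dora[active_adapter]:         # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Every one of those attribute reads is traced, so each LoRA-wrapped projection in this attention block (to_out[0], add_k_proj, add_q_proj, add_v_proj) contributes the same several dozen guards, repeated across all transformer blocks.
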
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_k_proj, 140533118236272) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].attn.add_k_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_k_proj.training, 140591004393408) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_k_proj.lora_A, 140533118236896) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_k_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_k_proj.lora_A['default_0'], 140533118606576) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_k_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_k_proj.lora_A['default_0'].weight, 140537315124432) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_k_proj.lora_B, 140533118235840) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_k_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_k_proj.lora_B['default_0'], 140533118604704) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_k_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_k_proj.base_layer, 140581767544784) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_k_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_k_proj.lora_dropout, 140533118233152) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_k_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_k_proj.lora_dropout['default_0'], 140533118233728) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_k_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.add_k_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].attn.add_k_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].attn.add_k_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.add_k_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].attn.add_k_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_k_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.add_k_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[9].attn.add_k_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_k_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
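
Each guard kind in this dump reduces to a cheap predicate over the traced attributes. The helpers below are rough, illustrative Python equivalents (the function names are made up, and the real checks run inside Dynamo's guard manager, not in Python): ID_MATCH pins an object's identity, TYPE_MATCH the identity of its type, EQUALS_MATCH a concrete value, DICT_LENGTH and LENGTH_CHECK container sizes, and TENSOR_ALIASING that two sources resolve to the very same object.

def id_match(obj, expected_id: int) -> bool:         # ID_MATCH / ___check_obj_id
    return id(obj) == expected_id

def type_match(obj, expected_type_id: int) -> bool:  # TYPE_MATCH / ___check_type_id
    return id(type(obj)) == expected_type_id

def equals_match(value, expected) -> bool:           # EQUALS_MATCH, e.g. scaling['default_0'] == 1.0
    return value == expected

def dict_length(d: dict, n: int) -> bool:            # DICT_LENGTH
    return len(d) == n

def length_check_falsy(seq) -> bool:                 # LENGTH_CHECK, e.g. "not merged_adapters"
    return not seq

def tensor_aliasing(a, b) -> bool:                   # TENSOR_ALIASING
    return a is b

One practical consequence visible here: because scaling['default_0'] is guarded by value with an EQUALS_MATCH against 1.0, running the same compiled model with a different LoRA scale fails that guard and forces a recompile.
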
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_q_proj, 140533118611472) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].attn.add_q_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_q_proj.training, 140591004393408) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_q_proj.lora_A, 140533118610800) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_q_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_q_proj.lora_A['default_0'], 140533118611568) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_q_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_q_proj.lora_A['default_0'].weight, 140537315008224) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_q_proj.lora_B, 140533118609504) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_q_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_q_proj.lora_B['default_0'], 140533118612528) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_q_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_q_proj.base_layer, 140581769887856) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_q_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_q_proj.lora_dropout, 140533118611136) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_q_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_q_proj.lora_dropout['default_0'], 140533118610656) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_q_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.add_q_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].attn.add_q_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].attn.add_q_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.add_q_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].attn.add_q_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_q_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.add_q_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[9].attn.add_q_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_q_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
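
For reference, a dump like this one can be regenerated with PyTorch's logging controls; the [0/1] tag on these lines indicates a second compilation (a recompile) of frame 0. A self-contained sketch using a toy module in place of the Flux transformer:

import torch

torch._logging.set_logs(guards=True)  # same effect as running with TORCH_LOGS="guards"

lin = torch.nn.Linear(8, 8)
compiled = torch.compile(lin)
compiled(torch.randn(2, 8))  # the TREE_GUARD_MANAGER for this frame is printed at compile time
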
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_v_proj, 140533118612960) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].attn.add_v_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_v_proj.training, 140591004393408) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_v_proj.lora_A, 140533118606912) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_v_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_v_proj.lora_A['default_0'], 140533118604848) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_v_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_v_proj.lora_A['default_0'].weight, 140537315115952) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_v_proj.lora_B, 140533118612912) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_v_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_v_proj.lora_B['default_0'], 140533118610848) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_v_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_v_proj.base_layer, 140581769887808) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_v_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_v_proj.lora_dropout, 140533118613920) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_v_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_v_proj.lora_dropout['default_0'], 140533118614976) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_v_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.add_v_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].attn.add_v_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].attn.add_v_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.add_v_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].attn.add_v_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_v_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.add_v_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[9].attn.add_v_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_v_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out, 
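The add_v_proj guards above all fall out of a handful of lines in peft/tuners/lora/layer.py (557-568 in the guard comments). As a rough sketch of what dynamo is tracing here, the guarded forward path looks like the following. This is a simplified reconstruction from the quoted source lines, not the real PEFT implementation (DoRA, multi-adapter handling, and dtype corner cases omitted); the class name and constructor defaults are made up for illustration:

import torch
import torch.nn as nn

class LoraLinearSketch(nn.Module):
    # Stand-in for a peft LoRA-wrapped Linear. Every attribute read in
    # forward() shows up as a guard in the dump: lora_A/lora_B and their
    # 'default_0' entries (ID_MATCH), scaling (TYPE_MATCH + DICT_LENGTH on
    # the dict, EQUALS_MATCH on the float), use_dora (ID_MATCH on the bool),
    # merged_adapters (TYPE_MATCH + LENGTH_CHECK), _disable_adapters and
    # _active_adapter, plus .training on every submodule.
    def __init__(self, in_features=3072, out_features=3072, r=16, lora_alpha=16.0):
        super().__init__()
        self.base_layer = nn.Linear(in_features, out_features)
        self.lora_A = nn.ModuleDict({"default_0": nn.Linear(in_features, r, bias=False)})
        self.lora_B = nn.ModuleDict({"default_0": nn.Linear(r, out_features, bias=False)})
        self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})
        self.scaling = {"default_0": lora_alpha / r}   # == 1.0 here, hence EQUALS_MATCH == 1.0
        self.use_dora = {"default_0": False}
        self.merged_adapters = []
        self._disable_adapters = False
        self._active_adapter = ["default_0"]

    def forward(self, x):
        result = self.base_layer(x)                          # layer.py:557
        for active_adapter in self._active_adapter:
            if active_adapter not in self.lora_A.keys():     # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]             # layer.py:562
            lora_B = self.lora_B[active_adapter]             # layer.py:563
            dropout = self.lora_dropout[active_adapter]      # layer.py:564
            scaling = self.scaling[active_adapter]           # layer.py:565
            x = x.to(lora_A.weight.dtype)                    # layer.py:566
            if not self.use_dora[active_adapter]:            # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

The EQUALS_MATCH on scaling['default_0'] == 1.0 is worth noting: the float is read at the Python level and baked into the graph as a constant, so anything that changes the effective LoRA scale on this layer fails that guard and forces another recompile.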
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_add_out, 140533118776512) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].attn.to_add_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_add_out.training, 140591004393408) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_add_out.lora_A, 140533118775888) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_add_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_add_out.lora_A['default_0'], 140533118768496) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_add_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_add_out.lora_A['default_0'].weight, 140537315010144) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_add_out.lora_B, 140533118763360) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_add_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_add_out.lora_B['default_0'], 140533118776320) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_add_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_add_out.base_layer, 140581769888048) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_add_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_add_out.lora_dropout, 140533118776080) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_add_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_add_out.lora_dropout['default_0'], 140533118776224) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_add_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.to_add_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].attn.to_add_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].attn.to_add_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.to_add_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].attn.to_add_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_add_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.to_add_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[9].attn.to_add_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_add_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.norm_added_k, 140581769888192) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.norm_added_k.weight, 140581765826976) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.norm_added_q, 140581769888096) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.norm_added_q.weight, 140581772714736) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.processor, 140581767544352) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
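The norm_added_k / norm_added_q guards trace diffusers' RMSNorm (normalization.py:428 and 430 in the guard comments): eps is frozen with EQUALS_MATCH == 1e-06 and weight with ID_MATCH. A minimal sketch of the guarded computation, with the dtype handling simplified relative to the real diffusers code:

import torch
import torch.nn as nn

class RMSNormSketch(nn.Module):
    # eps (EQUALS_MATCH) and weight (ID_MATCH) are both constants of the
    # compiled graph; only the hidden_states tensor is a real input.
    def __init__(self, dim, eps=1e-6, elementwise_affine=True):
        super().__init__()
        self.eps = eps
        self.weight = nn.Parameter(torch.ones(dim)) if elementwise_affine else None

    def forward(self, hidden_states):
        input_dtype = hidden_states.dtype
        variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + self.eps)  # normalization.py:428
        if self.weight is not None:                                       # normalization.py:430
            hidden_states = hidden_states.to(input_dtype) * self.weight
        return hidden_states

The EQUALS_MATCH on attn.heads == 24 and the TYPE_MATCH/ID_MATCH pair on attn.processor just above play the same role: the head count and the processor instance are specialized into this graph.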
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1, accessed_by=DictGetItemGuardAccessor(norm1)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1, 140581767543920) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.norm, 140581767544064) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.silu, 140581767543968) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.linear, 140533118287152) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].norm1.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
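norm1 here is diffusers' AdaLayerNormZero: the guard comments quote normalization.py:135-139 (the emb-is-None check, emb = self.linear(self.silu(emb)), and the shift/scale modulation of self.norm(x)), and transformer_flux.py:165 unpacks five return values from it. A rough sketch reconstructed from those quoted lines; the chunk-into-six layout and the LayerNorm settings are assumptions based on the standard diffusers module, and norm1.linear is itself LoRA-wrapped in this model, which is why the same peft guard subtree repeats under it below:

import torch
import torch.nn as nn

class AdaLayerNormZeroSketch(nn.Module):
    # Conditioning path guarded above: silu -> linear -> chunk into six
    # modulation tensors, then LayerNorm(x) modulated by shift/scale.
    def __init__(self, embedding_dim):
        super().__init__()
        self.emb = None                  # guard: ID_MATCH on self.emb (None here)
        self.silu = nn.SiLU()
        self.linear = nn.Linear(embedding_dim, 6 * embedding_dim)   # LoRA-wrapped in the dump
        self.norm = nn.LayerNorm(embedding_dim, elementwise_affine=False, eps=1e-6)

    def forward(self, x, emb):
        emb = self.linear(self.silu(emb))                           # normalization.py:137
        shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = emb.chunk(6, dim=1)
        x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # normalization.py:139
        return x, gate_msa, shift_mlp, scale_mlp, gate_mlp          # transformer_flux.py:165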
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.linear.lora_A, 140533118273616) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.linear.lora_A['default_0'], 140533118285424) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.linear.lora_A['default_0'].weight, 140537313182720) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.linear.lora_B, 140533118285808) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.linear.lora_B['default_0'], 140533118276112) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards]
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.linear.base_layer, 140581767544016) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.linear.lora_dropout, 140533118287392) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.linear.lora_dropout['default_0'], 140533118285952) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].norm1.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].norm1.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].norm1.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].norm1.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].norm1.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].norm1.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[9].norm1.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
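
The norm1.linear guard subtree above pins down every piece of PEFT adapter state that the LoRA Linear forward reads: the base_layer identity, the lora_dropout ModuleDict, scaling == 1.0, use_dora False, merged_adapters empty, _disable_adapters False, plus aliasing checks that every layer shares one _active_adapter object. A minimal sketch of the guarded code path, reconstructed from the source lines quoted in the guards (the final result update is paraphrased from peft's lora.Linear.forward and may differ across peft versions):

    # Sketch of peft/tuners/lora/layer.py Linear.forward; line numbers as quoted in the guards.
    def forward(self, x, *args, **kwargs):
        if self.disable_adapters:                             # guarded: _disable_adapters is False
            result = self.base_layer(x, *args, **kwargs)
        elif self.merged:                                     # guarded: merged_adapters is empty
            result = self.base_layer(x, *args, **kwargs)
        else:
            result = self.base_layer(x, *args, **kwargs)      # layer.py:557
            for active_adapter in self.active_adapters:       # shared _active_adapter, see TENSOR_ALIASING
                if active_adapter not in self.lora_A.keys():  # layer.py:560
                    continue
                lora_A = self.lora_A[active_adapter]          # layer.py:562
                lora_B = self.lora_B[active_adapter]          # layer.py:563
                dropout = self.lora_dropout[active_adapter]   # layer.py:564
                scaling = self.scaling[active_adapter]        # layer.py:565, guarded == 1.0
                x = x.to(lora_A.weight.dtype)                 # layer.py:566
                if not self.use_dora[active_adapter]:         # layer.py:568, guarded False
                    # assumed update from peft's non-DoRA branch:
                    result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Because scaling is guarded by value (EQUALS_MATCH == 1.0) and the adapter containers by object identity, changing the LoRA scale or merging/unmerging adapters after compilation invalidates these guards and triggers a recompile.
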
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm2, accessed_by=DictGetItemGuardAccessor(norm2)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm2, 140581769888240) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm2.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context, 140581769888576) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net, 140581769888720) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[9].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[0], 140581769888672) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[0].proj, 140533118669184) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].ff_context.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[0].proj.lora_A, 140533118667360) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[0].proj.lora_A['default_0'], 140533118676672) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[0].proj.lora_A['default_0'].weight, 140537314738400) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[0].proj.lora_B, 140533118666880) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[0].proj.lora_B['default_0'], 140533118672112) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[0].proj.base_layer, 140581769888768) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[0].proj.lora_dropout, 140533118670000) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[0].proj.lora_dropout['default_0'], 140533118670048) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].ff_context.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].ff_context.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].ff_context.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].ff_context.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].ff_context.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].ff_context.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[9].ff_context.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
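
The net[0] guards (a LoRA-wrapped proj Linear plus an EQUALS_MATCH on approximate == 'tanh') match diffusers' GELU activation block. A minimal sketch based on the activations.py lines quoted above (constructor arguments follow current diffusers conventions and are assumptions, since the log does not show them):

    import torch.nn as nn
    import torch.nn.functional as F

    class GELU(nn.Module):  # sketch of diffusers.models.activations.GELU
        def __init__(self, dim_in: int, dim_out: int, approximate: str = "none", bias: bool = True):
            super().__init__()
            self.proj = nn.Linear(dim_in, dim_out, bias=bias)  # LoRA-wrapped in this trace
            self.approximate = approximate                     # guarded: EQUALS_MATCH == 'tanh'

        def gelu(self, gate):
            return F.gelu(gate, approximate=self.approximate)  # activations.py:83

        def forward(self, hidden_states):
            hidden_states = self.proj(hidden_states)           # activations.py:88
            return self.gelu(hidden_states)

Note that approximate is a plain Python string attribute, so Dynamo guards it by value rather than by object identity.
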
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[1], 140581769888864) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[2], 140533118675712) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].ff_context.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[2].lora_A, 140533118679264) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[2].lora_A['default_0'], 140533118678832) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[2].lora_A['default_0'].weight, 140537314752880) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[2].lora_B, 140533118679792) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[2].lora_B['default_0'], 140533118676240) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[2].base_layer, 140581769888912) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[2].lora_dropout, 140533118678784) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[2].lora_dropout['default_0'], 140533118675952) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].ff_context.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].ff_context.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].ff_context.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].ff_context.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].ff_context.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].ff_context.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[9].ff_context.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff_context.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
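
Taken together, the ff_context.net guards describe diffusers' FeedForward container: LENGTH_CHECK len(net) == 3, net[0] the GELU block sketched above, net[1] carrying only a training-flag guard (consistent with a parameterless Dropout), and net[2] a LoRA-wrapped output Linear. A sketch under those assumptions, reusing the GELU sketch (constructor arguments are illustrative, not taken from the log):

    class FeedForward(nn.Module):  # sketch of diffusers.models.attention.FeedForward
        def __init__(self, dim: int, mult: int = 4, dropout: float = 0.0):
            super().__init__()
            inner_dim = dim * mult
            self.net = nn.ModuleList([
                GELU(dim, inner_dim, approximate="tanh"),  # net[0]; approximate guarded == 'tanh'
                nn.Dropout(dropout),                       # net[1]; only its training flag is guarded
                nn.Linear(inner_dim, dim),                 # net[2]; LoRA-wrapped in this trace
            ])

        def forward(self, hidden_states):
            for module in self.net:                        # attention.py:1200
                hidden_states = module(hidden_states)
            return hidden_states

Every ID_MATCH in this subtree pins a concrete module object, so replacing any submodule (for example re-loading the LoRA adapter) invalidates the compiled graph and forces a recompile.
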
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context, 140581767544112) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.norm, 140581767544304) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.silu, 140581767544208) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.linear, 140533118273712) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].norm1_context.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.linear.lora_A, 140533118278368) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.linear.lora_A['default_0'], 140533118283168) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.linear.lora_A['default_0'].weight, 140537315117152) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.linear.lora_B, 140533118279376) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.linear.lora_B['default_0'], 140533118280816) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +-
ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.linear.base_layer, 140581767544256) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.linear.lora_dropout, 140533118284128) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.linear.lora_dropout['default_0'], 140533118275152) # 
dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].norm1_context.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].norm1_context.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].norm1_context.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].norm1_context.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].norm1_context.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].norm1_context.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[9].norm1_context.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].transformer_blocks[9].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm2_context, 140581769888288) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[10], accessed_by=GetItemGuardAccessor(10) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10], 140581767543584) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff, accessed_by=DictGetItemGuardAccessor(ff) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff, 140581769890208) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net, 140581769890448) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[10].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0], 140581769890400) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[10].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].proj, 140533117587696) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].ff.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].proj.lora_A, 140533117588560) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].proj.lora_A['default_0'], 140533117593024) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].proj.lora_A['default_0'].weight, 140537314492256) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].proj.lora_B, 140533117590624) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].proj.lora_B['default_0'], 140533117594512) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].proj.base_layer, 140581769890496) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].proj.lora_dropout, 140533117589472) 
# dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].proj.lora_dropout['default_0'], 140533117588848) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].ff.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].ff.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.scaling['default_0'], 
accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].ff.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].ff.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].ff.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].ff.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[10].ff.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | 
+- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[1], 140581769890544) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[2], 140533117589952) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].ff.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[2].lora_A, 140533117587744) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[10].ff.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[2].lora_A['default_0'], 140533118789104) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[2].lora_A['default_0'].weight, 140537314482656) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[2].lora_B, 140533117589760) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[2].lora_B['default_0'], 140533118789440) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[2].base_layer, 140581769890592) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[2].lora_dropout, 140533117593648) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[2].lora_dropout['default_0'], 140533117594176) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].ff.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].ff.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].ff.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].ff.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].ff.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].ff.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[10].ff.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
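The block above is the complete guard set Dynamo records for a single peft LoRA Linear (here transformer_blocks[10].ff.net[2]); structurally identical blocks follow for attn.to_k, attn.to_q and attn.to_v. Every attribute read traced through peft's forward becomes one guard, annotated with the source line that triggered it. A minimal sketch of that forward, reconstructed from the lines quoted in the guards (peft/tuners/lora/layer.py:557-568) and simplified to the single 'default_0' adapter; the exact peft code varies by version:

    # Reconstruction of the traced LoRA forward from the quoted source
    # lines; approximate, per-version details (e.g. DoRA handling) differ.
    def forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)         # layer.py:557
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():     # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]             # layer.py:562
            lora_B = self.lora_B[active_adapter]             # layer.py:563
            dropout = self.lora_dropout[active_adapter]      # layer.py:564
            scaling = self.scaling[active_adapter]           # layer.py:565
            x = x.to(lora_A.weight.dtype)                    # layer.py:566
            if not self.use_dora[active_adapter]:            # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

The guard shapes follow directly from these lookups: ID_MATCH on the ModuleDict containers lora_A/lora_B/lora_dropout and their 'default_0' entries, TYPE_MATCH plus DICT_LENGTH plus EQUALS_MATCH on the plain dicts scaling (== 1.0 here) and use_dora (False here), and LENGTH_CHECK that merged_adapters stays empty.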
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn, accessed_by=DictGetItemGuardAccessor(attn)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn, 140581769889440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_k, 140533116877024) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_k.lora_A, 140533116876928) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_k.lora_A['default_0'], 140533116878416) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_k.lora_A['default_0'].weight, 140537314740560) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_k.lora_B, 140533116876832) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_k.lora_B['default_0'], 140533116876304) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_k.base_layer, 140581769889584) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_k.lora_dropout, 140533116877072) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_k.lora_dropout['default_0'], 140533116876016) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[10].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
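The guard kinds recurring throughout this dump are cheap predicates that are re-evaluated on every call of the compiled transformer before the cached graph may be reused. Roughly, in illustrative Python only (the real checks are the ___check_* helpers compiled into the TREE_GUARD_MANAGER by torch/_dynamo/guards.py):

    # Illustrative equivalents of the guard kinds above; not Dynamo's
    # actual implementation, just the predicate each one expresses.
    id_match      = lambda obj, ref: id(obj) == ref        # ___check_obj_id
    type_match    = lambda obj, ref: id(type(obj)) == ref  # ___check_type_id
    equals_match  = lambda obj, val: obj == val            # scaling['default_0'] == 1.0
    dict_length   = lambda d, n: len(d) == n               # len(...scaling) == 1
    length_check  = lambda seq: not seq                    # not ...merged_adapters
    dict_contains = lambda d, key: key in d                # negated for overridden 'forward'
    aliasing      = lambda a, b: a is b                    # TENSOR_ALIASING

The paired TENSOR_ALIASING lines (each emitted twice in this dump) assert that a layer's _active_adapter is the very same object as transformer_blocks[0].norm1.linear._active_adapter; peft shares one adapter list across all wrapped modules, so a single identity check covers them all.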
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_q, 140533116813072) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_q.lora_A, 140533116813168) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_q.lora_A['default_0'], 140533116806304) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_q.lora_A['default_0'].weight, 140537314738560) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_q.lora_B, 140533116812016) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_q.lora_B['default_0'], 140533116812592) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_q.base_layer, 140581769889680) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_q.lora_dropout, 140533116809856) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_q.lora_dropout['default_0'], 140533116811152) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[10].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
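Each adapted projection contributes a guard block like the two above, so the total guard count grows linearly with the number of LoRA-wrapped modules, and any ID_MATCH failure (for example, reloading an adapter, which creates fresh module and weight objects) invalidates the cache entry and forces another recompile like the [0/1] compile this dump belongs to. When the adapter is fixed at inference time, a common mitigation is to merge the LoRA into the base weights before compiling so that none of this peft bookkeeping is traced at all; a hedged sketch using diffusers' LoRA helpers (checkpoint id and path are placeholders; exact behavior varies by diffusers/peft version):

    # Fold a static LoRA into the base weights before torch.compile so
    # Dynamo never sees peft's adapter dicts (no lora_A/lora_B guards).
    import torch
    from diffusers import FluxPipeline

    pipe = FluxPipeline.from_pretrained(
        "black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16  # assumed checkpoint
    ).to("cuda")
    pipe.load_lora_weights("path/to/lora")  # placeholder path
    pipe.fuse_lora()                        # merge lora_B @ lora_A * scaling into each base layer
    pipe.unload_lora_weights()              # drop the peft wrappers, keep the fused weights
    pipe.transformer = torch.compile(pipe.transformer)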
source=L['self'].transformer_blocks[10].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_v.lora_A, 140533116871984) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_v.lora_A['default_0'], 140533116863824) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[10].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_v.lora_A['default_0'].weight, 140537314594864) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_v.lora_B, 140533116863680) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_v.lora_B['default_0'], 140533116862768) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[10].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_v.base_layer, 140581769889776) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_v.lora_dropout, 140533116872272) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_v.lora_dropout['default_0'], 140533116877936) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
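The guard comments above all point at the same few lines of peft/tuners/lora/layer.py. For orientation, here is a minimal sketch of that forward path, paraphrased from the source locations quoted in the guards (lines 557-568); it is not the verbatim peft source, and the guard each attribute access produces is noted alongside.

```python
# Sketch of peft's LoRA Linear forward, paraphrased from the
# peft/tuners/lora/layer.py:557-568 lines quoted in the guards above.
def forward(self, x, *args, **kwargs):
    result = self.base_layer(x, *args, **kwargs)      # base_layer -> ID_MATCH (line 557)
    for active_adapter in self.active_adapters:       # _active_adapter -> TENSOR_ALIASING (tuners_utils.py:516)
        if active_adapter not in self.lora_A.keys():  # lora_A ModuleDict -> ID_MATCH (line 560)
            continue
        lora_A = self.lora_A[active_adapter]          # lora_A['default_0'] -> ID_MATCH (line 562)
        lora_B = self.lora_B[active_adapter]          # lora_B['default_0'] -> ID_MATCH (line 563)
        dropout = self.lora_dropout[active_adapter]   # lora_dropout -> ID_MATCH (line 564)
        scaling = self.scaling[active_adapter]        # plain dict -> TYPE_MATCH + DICT_LENGTH + EQUALS_MATCH == 1.0 (line 565)
        x = x.to(lora_A.weight.dtype)                 # lora_A weight -> ID_MATCH (line 566)
        if not self.use_dora[active_adapter]:         # use_dora['default_0'] is False -> ID_MATCH (line 568)
            result = result + lora_B(lora_A(dropout(x))) * scaling
    return result
```

Because `scaling` and `use_dora` are read as plain Python dict values rather than tensors, Dynamo inlines their contents into the compiled graph and guards on them directly, which is why each LoRA layer contributes the same cluster of TYPE_MATCH/DICT_LENGTH/EQUALS_MATCH checks below.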
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[10].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.norm_k, 140581769889632) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.norm_k.weight, 140581765992896) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.norm_q, 140581769889536) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.norm_q.weight, 140581765992976) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out, 140581769889968) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[0], 140533117589232) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].attn.to_out[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[0].training, 140591004393408) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
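The norm_q/norm_k guards above trace the RMS norm applied to queries and keys. A simplified sketch of the forward they quote (diffusers/src/diffusers/models/normalization.py:428-430; paraphrased, not the verbatim diffusers source):

```python
import torch

# Simplified RMSNorm forward. self.eps is a plain Python float, so Dynamo
# inlines it and guards it with EQUALS_MATCH (eps == 1e-06) instead of
# tracing it; the optional weight produces the ID_MATCH guard on .weight.
def rms_norm_forward(self, hidden_states):
    variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)
    hidden_states = hidden_states * torch.rsqrt(variance + self.eps)  # line 428
    if self.weight is not None:                                       # line 430
        hidden_states = hidden_states * self.weight.to(hidden_states.dtype)
    return hidden_states
```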
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[0].lora_A, 140533117594416) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[0].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[0].lora_A['default_0'], 140533117595520) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[0].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[0].lora_A['default_0'].weight, 140537314602544) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[0].lora_B, 140533117595712) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[0].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[0].lora_B['default_0'], 140533117589328) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[0].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[0].base_layer, 140581769890016) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[0].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[0].lora_dropout, 140533117595376) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[0].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[0].lora_dropout['default_0'], 140533117590432) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[0].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.to_out[0].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].attn.to_out[0].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].attn.to_out[0].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.to_out[0].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].attn.to_out[0].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[0].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
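Several of the guards above bake in values that are commonly changed between calls. As a hypothetical illustration (assuming the usual diffusers LoRA API on the surrounding pipeline object, here called `pipe`, which is not shown in this log), each of the following would fail one of these guards and force a recompile of frame [0/1]:

```python
# Hypothetical post-compile changes; each invalidates a guard shown above.
pipe.set_adapters(["default_0"], adapter_weights=[0.5])  # EQUALS_MATCH: scaling['default_0'] == 1.0
pipe.load_lora_weights("path/to/second_lora", adapter_name="second")  # DICT_LENGTH: len(...) == 1
pipe.disable_lora()        # ID_MATCH on _disable_adapters / use_dora booleans
pipe.transformer.train()   # ID_MATCH on the many .training flags
```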
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.to_out[0].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[10].attn.to_out[0].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[0]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[1], 140581769890064) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_k_proj, 140533116870592) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].attn.add_k_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_k_proj.training, 140591004393408) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_k_proj.lora_A, 140533116875632) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_k_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_k_proj.lora_A['default_0'], 140533116877552) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_k_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_k_proj.lora_A['default_0'].weight, 140537314600544) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_k_proj.lora_B, 140533116874528) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_k_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_k_proj.lora_B['default_0'], 140533116862912) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_k_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_k_proj.base_layer, 140581769889824) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_k_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
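For reference, a dump like this section can be regenerated by enabling the `guards` logging artifact before compiling. A minimal sketch (assumed setup; the original invocation is not part of the log):

```python
import torch

# Enable guard logging; equivalent to running with TORCH_LOGS="guards".
torch._logging.set_logs(guards=True)

# Assumed setup: `transformer` is the PEFT/LoRA-wrapped FluxTransformer2DModel
# that appears as L['self'] in the guards above. The TREE_GUARD_MANAGER dump
# is emitted after the first (compiling) call.
# transformer = torch.compile(transformer)
# transformer(hidden_states, encoder_hidden_states, ...)
```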
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_k_proj.lora_dropout, 140533116876400) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_k_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_k_proj.lora_dropout['default_0'], 140533116875920) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_k_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.add_k_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].attn.add_k_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].attn.add_k_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.add_k_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].attn.add_k_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_k_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.add_k_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[10].attn.add_k_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_k_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_q_proj, 140533116744080) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].attn.add_q_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_q_proj.training, 140591004393408) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_q_proj.lora_A, 140533117587024) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_q_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_q_proj.lora_A['default_0'], 140533117586640) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_q_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_q_proj.lora_A['default_0'].weight, 140537314605984) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_q_proj.lora_B, 140533117590048) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_q_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_q_proj.lora_B['default_0'], 140533117588800) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_q_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_q_proj.base_layer, 140581769889920) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_q_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_q_proj.lora_dropout, 140533117595232) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_q_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_q_proj.lora_dropout['default_0'], 140533117587072) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager:
source=L['self'].transformer_blocks[10].attn.add_q_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_q_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.add_q_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].attn.add_q_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].attn.add_q_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.add_q_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].attn.add_q_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_q_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 
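[editor's note] The guard kinds in the add_q_proj.scaling and use_dora subtrees above come in several flavors: TYPE_MATCH plus DICT_LENGTH pin the dict's exact type and size, EQUALS_MATCH pins the float value scaling['default_0'] == 1.0, and ID_MATCH on use_dora['default_0'] pins the False singleton by object id. A rough Python rendering of those checks follows; it is a sketch of the semantics only, not Dynamo's actual compiled guard accessors.

```python
# Sketch of the guard-kind semantics seen above (illustrative only; the real
# checks are compiled accessors inside torch._dynamo, not Python functions).
def id_match(obj, expected_id):            # ID_MATCH: identity; True/False/None
    return id(obj) == expected_id          # are pinned via their singleton ids

def type_match(obj, expected_type_id):     # TYPE_MATCH: exact type object
    return id(type(obj)) == expected_type_id

def equals_match(obj, expected):           # EQUALS_MATCH: value equality,
    return obj == expected                 # e.g. scaling['default_0'] == 1.0

def dict_length(d, n):                     # DICT_LENGTH: len(scaling) == 1
    return len(d) == n
```

If any of these checks fails on a later call, for instance a second adapter making len(scaling) == 2, the cached graph is rejected and Dynamo recompiles.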
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.add_q_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[10].attn.add_q_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_q_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_v_proj, 140533116875344) # 
encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].attn.add_v_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_v_proj.training, 140591004393408) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_v_proj.lora_A, 140533116878368) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_v_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_v_proj.lora_A['default_0'], 140533116736016) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 
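[editor's note] The lora_A / lora_B / lora_dropout / scaling / use_dora reads guarded throughout this tree all come from the PEFT LoRA Linear forward path cited in the guard comments (peft/tuners/lora/layer.py:557-568). The stub below is a minimal, hedged paraphrase of that path reconstructed from those comments, assuming the single "default_0" adapter seen in the guards and no DoRA; it is not the exact peft source.

```python
import torch
import torch.nn as nn

# Illustrative stub of a peft LoRA-wrapped Linear; attribute names mirror the
# guard sources above, shapes and ranks are arbitrary.
class LoraLinearStub(nn.Module):
    def __init__(self, in_f=8, out_f=8, r=4):
        super().__init__()
        self.base_layer = nn.Linear(in_f, out_f)
        self.lora_A = nn.ModuleDict({"default_0": nn.Linear(in_f, r, bias=False)})
        self.lora_B = nn.ModuleDict({"default_0": nn.Linear(r, out_f, bias=False)})
        self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})
        self.scaling = {"default_0": 1.0}      # EQUALS_MATCH == 1.0 above
        self.use_dora = {"default_0": False}   # ID_MATCH on the False singleton
        self.active_adapters = ["default_0"]

    def forward(self, x):
        result = self.base_layer(x)                       # layer.py:557
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():  # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]          # layer.py:562
            lora_B = self.lora_B[active_adapter]          # layer.py:563
            dropout = self.lora_dropout[active_adapter]   # layer.py:564
            scaling = self.scaling[active_adapter]        # layer.py:565
            x = x.to(lora_A.weight.dtype)                 # layer.py:566
            if not self.use_dora[active_adapter]:         # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

print(LoraLinearStub()(torch.randn(2, 8)).shape)  # torch.Size([2, 8])
```

Every attribute read on this path becomes one of the guards above, which is why each LoRA-wrapped projection contributes a near-identical subtree to the dump.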
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_v_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_v_proj.lora_A['default_0'].weight, 140537314592064) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_v_proj.lora_B, 140533116873904) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_v_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_v_proj.lora_B['default_0'], 140533116736112) # lora_B = 
self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_v_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_v_proj.base_layer, 140581769889872) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_v_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_v_proj.lora_dropout, 140533116877648) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[10].attn.add_v_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_v_proj.lora_dropout['default_0'], 140533116875248) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_v_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.add_v_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].attn.add_v_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].attn.add_v_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.add_v_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # 
peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].attn.add_v_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_v_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.add_v_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[10].attn.add_v_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_v_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj._active_adapter, 
accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_add_out, 140533117599504) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].attn.to_add_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_add_out.training, 140591004393408) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_add_out.lora_A, 140533117593456) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 
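[editor's note] The paired TENSOR_ALIASING guards assert that `_active_adapter` on each LoRA layer is the very same object as on transformer_blocks[0].norm1.linear, so a single identity check covers every layer. A self-contained stub of the property being checked (plain Python, not Dynamo internals):

```python
# Stub illustration of what TENSOR_ALIASING asserts: two attribute paths
# resolve to the same object, so `is` holds between them.
class LoraLayerStub:
    def __init__(self, active_adapter):
        self._active_adapter = active_adapter

shared = ["default_0"]                    # one list shared across layers
norm1_linear = LoraLayerStub(shared)
add_v_proj = LoraLayerStub(shared)
assert norm1_linear._active_adapter is add_v_proj._active_adapter
```

The aliasing presumably comes from every layer holding the same adapter list; a layer given its own copy would fail this guard and force a recompile.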
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_add_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_add_out.lora_A['default_0'], 140533117587456) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_add_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_add_out.lora_A['default_0'].weight, 140537314600304) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_add_out.lora_B, 140533117591392) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[10].attn.to_add_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_add_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_add_out.lora_B['default_0'], 140533117588656) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_add_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_add_out.base_layer, 140581769890112) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_add_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 
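[editor's note] Nearly every module subtree above repeats the same DICT_CONTAINS guard, `not ___dict_contains('forward', module.__dict__)`, taken from the dispatch in nn/modules/module.py:1556. It rules out an instance-level forward override, which would bypass the method Dynamo traced. A small runnable demonstration of the guarded condition (standard nn.Module behavior, not Dynamo code):

```python
import torch.nn as nn

# nn.Module dispatches to self.forward; an instance-level override lands in
# the instance __dict__ and would change the traced code path.
layer = nn.Linear(4, 4)
assert "forward" not in layer.__dict__   # guard holds: class forward is used
layer.forward = lambda x: x              # instance override...
assert "forward" in layer.__dict__       # ...would now fail the guard
```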
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_add_out.lora_dropout, 140533117592928) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_add_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_add_out.lora_dropout['default_0'], 140533117593408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_add_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.to_add_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: 
len(L['self'].transformer_blocks[10].attn.to_add_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].attn.to_add_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.to_add_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].attn.to_add_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_add_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.to_add_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[10].attn.to_add_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | 
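[editor's note] The TYPE_MATCH plus LENGTH_CHECK pair on merged_adapters mirrors the peft `merged` property quoted in the comments (`return bool(self.merged_adapters)`, peft/tuners/tuners_utils.py:506): an empty list keeps the compiled graph specialized to the unmerged LoRA path. A stub of the guarded property:

```python
# Stub of the peft `merged` property per the guard comment; not the real class.
class MergeStateStub:
    def __init__(self):
        self.merged_adapters = []     # LENGTH_CHECK: not merged_adapters

    @property
    def merged(self) -> bool:
        return bool(self.merged_adapters)

assert MergeStateStub().merged is False
```

Merging an adapter into the base weights would populate the list, flip `merged`, and invalidate this graph.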
| | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_add_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.norm_added_k, 140581769890256) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k 
is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.norm_added_k.weight, 140581765992736) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.norm_added_q, 140581769890160) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.norm_added_q.weight, 140581765992816) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.processor, 140581769889392) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
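
Each leaf in the tree above is a cheap predicate that must hold on the next call for this compiled graph to be reused; if any fails, Dynamo recompiles. As a reading aid, here is a rough Python sketch of what the four guard kinds seen so far reduce to. This is illustrative only (the real checks run inside Dynamo's guard evaluator), but the semantics match the names:

    # Rough Python equivalents of the guard kinds above (illustrative sketch,
    # not Dynamo's actual implementation):

    def id_match(obj, expected_id):
        # ID_MATCH / ___check_obj_id: must be the very same Python object
        return id(obj) == expected_id

    def equals_match(value, expected):
        # EQUALS_MATCH: value equality, e.g. eps == 1e-06 or heads == 24
        return value == expected

    def type_match(obj, expected_type_id):
        # TYPE_MATCH / ___check_type_id: same concrete type (e.g. the processor class)
        return id(type(obj)) == expected_type_id

    def dict_not_contains(key, d):
        # DICT_CONTAINS (negated form): no per-instance 'forward' override was added
        return key not in d
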
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1, accessed_by=DictGetItemGuardAccessor(norm1)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1, 140581769888960) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.norm, 140581769889104) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.silu, 140581769889008) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.linear, 140533118675664) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].norm1.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.linear.lora_A, 140533118678352) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.linear.lora_A['default_0'], 140533118676432) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.linear.lora_A['default_0'].weight, 140537314752800) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.linear.lora_B, 140533118677488) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.linear.lora_B['default_0'], 140533118677440) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.linear.base_layer, 140581769889056) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.linear.lora_dropout, 140533118676048) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.linear.lora_dropout['default_0'], 140533118676192) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].norm1.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].norm1.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].norm1.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].norm1.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].norm1.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].norm1.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[10].norm1.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
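
The norm1.linear subtree above pins down every attribute the PEFT LoRA wrapper touches on its hot path. The source comments quote peft/tuners/lora/layer.py:557-568; paraphrased as a simplified sketch (not the verbatim peft source, and eliding the DoRA branch, which is skipped here because use_dora['default_0'] is False):

    # Simplified paraphrase of peft's LoRA Linear.forward, reconstructed from
    # the line references quoted in the guard comments; illustrative only.
    def forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)        # layer.py:557
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():    # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]            # layer.py:562
            lora_B = self.lora_B[active_adapter]            # layer.py:563
            dropout = self.lora_dropout[active_adapter]     # layer.py:564
            scaling = self.scaling[active_adapter]          # layer.py:565
            x = x.to(lora_A.weight.dtype)                   # layer.py:566
            if not self.use_dora[active_adapter]:           # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Every dictionary lookup in that loop becomes a guard: ID_MATCH on lora_A, lora_B, lora_dropout and their 'default_0' entries, EQUALS_MATCH on scaling['default_0'] == 1.0, and ID_MATCH on use_dora['default_0']. Note in particular that the LoRA scale is baked in by value, so changing it (for example, compiling with a different lora_scale) fails the EQUALS_MATCH and forces a recompile.
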
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm2, accessed_by=DictGetItemGuardAccessor(norm2)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm2, 140581769890304) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm2.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context, 140581769890640) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net, 140581769890784) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[10].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0], 140581769890736) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].proj, 140533118789296) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].ff_context.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_A, 140533118793568) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_A['default_0'], 140533117371824) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_A['default_0'].weight, 140537314491456) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_B, 140533118785792) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_B['default_0'], 140533117371872) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].proj.base_layer, 140581769890832) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_dropout, 140533118780752) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_dropout['default_0'], 140533118792224) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].ff_context.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].ff_context.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].ff_context.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].ff_context.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].ff_context.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].ff_context.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[10].ff_context.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
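
The two TENSOR_ALIASING guards close out the net[0].proj subtree: Dynamo noticed that _active_adapter on this layer is the very same object as on transformer_blocks[0].norm1.linear, so rather than guarding each copy independently it asserts the aliasing once. In miniature (an illustrative sketch; _active_adapter is shown here as a shared container of adapter names, which is an assumption for clarity):

    # What a TENSOR_ALIASING guard asserts: two attributes keep referring
    # to one shared object.
    shared = ["default_0"]           # stands in for the shared _active_adapter
    layer_a_active_adapter = shared
    layer_b_active_adapter = shared
    assert layer_a_active_adapter is layer_b_active_adapter   # guard holds

    layer_b_active_adapter = list(shared)                     # equal, but a copy
    assert layer_a_active_adapter is not layer_b_active_adapter  # guard would fail
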
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[1], 140581769890928) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[2], 140533119490640) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].ff_context.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[2].lora_A, 140533116940688) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[2].lora_A['default_0'], 140533116934112) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[2].lora_A['default_0'].weight, 140537314491776) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[2].lora_B, 140533116940640) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[2].lora_B['default_0'], 140533116940160) # lora_B = self.lora_B[active_adapter] #
peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[2].base_layer, 140581769890976) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[2].lora_dropout, 140533116935600) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[2].lora_dropout['default_0'], 140533116938912) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].ff_context.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].ff_context.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].ff_context.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].ff_context.net[2].use_dora, 140591004466944) # if 
not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].ff_context.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].ff_context.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[10].ff_context.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[10].ff_context.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context, 140581769889152) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( #
diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.norm, 140581769889344) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.silu, 140581769889248) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # 
diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.linear, 140533118674224) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].norm1_context.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.linear.lora_A, 140533118668320) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[10].norm1_context.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.linear.lora_A['default_0'], 140533116811680) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.linear.lora_A['default_0'].weight, 140537314751920) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.linear.lora_B, 140533118674032) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # 
peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.linear.lora_B['default_0'], 140533116805872) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.linear.base_layer, 140581769889296) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.linear.lora_dropout, 140533118674176) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.linear.lora_dropout['default_0'], 140533118675328) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].norm1_context.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].norm1_context.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: 
L['self'].transformer_blocks[10].norm1_context.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].norm1_context.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].norm1_context.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].norm1_context.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[10].norm1_context.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm2_context, 140581769890352) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- GuardManager: source=L['self'].transformer_blocks[11], accessed_by=GetItemGuardAccessor(11) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11], 140581769888624) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff, accessed_by=DictGetItemGuardAccessor(ff) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff, 140581769892272) # ff_output = self.ff(norm_hidden_states) # 
diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net, 140581769892512) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[11].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[11].ff.net[0], 140581769892464) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].proj, 140533117187600) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].ff.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] 
[0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].proj.lora_A, 140533117188752) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].proj.lora_A['default_0'], 140533117326944) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].proj.lora_A['default_0'].weight, 140537314159376) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].proj.lora_B, 140533117189040) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].proj.lora_B['default_0'], 140533117333808) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].proj.base_layer, 140581769892560) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].proj.lora_dropout, 140533117178864) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].proj.lora_dropout['default_0'], 140533117174592) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].ff.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].ff.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].ff.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].ff.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].ff.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].ff.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[11].ff.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
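Note: every `# peft/tuners/lora/layer.py:NNN in forward` comment in the guard entries above points into the same short method, peft's LoRA `Linear.forward`. The subtree just dumped for `transformer_blocks[11].ff.net[0].proj` is Dynamo guarding each attribute that method reads: the `lora_A`/`lora_B` ModuleDicts and their `default_0` entries (ID_MATCH), the `scaling` dict (TYPE_MATCH, DICT_LENGTH, and an EQUALS_MATCH pinning the value 1.0), `lora_dropout`, `use_dora`, `merged_adapters`, and the `training`/`_disable_adapters` flags. The sketch below is paraphrased from the source lines quoted in the log comments, eliding the DoRA branch; it is a reading aid, not the exact library code:

```python
# Minimal sketch of peft's lora.Linear.forward, reconstructed from the lines
# cited in the guard comments above (layer.py:557-568); paraphrased, not exact.
def forward(self, x, *args, **kwargs):
    result = self.base_layer(x, *args, **kwargs)      # layer.py:557 -> base_layer ID_MATCH
    for active_adapter in self.active_adapters:       # backed by the shared _active_adapter
        if active_adapter not in self.lora_A.keys():  # layer.py:560 -> lora_A ID_MATCH
            continue
        lora_A = self.lora_A[active_adapter]          # layer.py:562 -> GetItemGuardAccessor(default_0)
        lora_B = self.lora_B[active_adapter]          # layer.py:563
        dropout = self.lora_dropout[active_adapter]   # layer.py:564
        scaling = self.scaling[active_adapter]        # layer.py:565 -> EQUALS_MATCH == 1.0
        x = x.to(lora_A.weight.dtype)                 # layer.py:566 -> weight ID_MATCH
        if not self.use_dora[active_adapter]:         # layer.py:568 -> ID_MATCH on the bool
            result = result + lora_B(lora_A(dropout(x))) * scaling
    return result
```

Read against this sketch, the guard tree is mechanical: each attribute access becomes a GuardManager node, each dict lookup a DictGetItemGuardAccessor/GetItemGuardAccessor edge, and each value the trace actually branched on becomes a leaf check.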
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[1], accessed_by=GetItemGuardAccessor(1)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[1], 140581769892608) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2], accessed_by=GetItemGuardAccessor(2)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[2], 140533117334912) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].ff.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[2].lora_A, 140533117335344) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[2].lora_A['default_0'], 140533117334624) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[2].lora_A['default_0'].weight, 140537314148896) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[2].lora_B, 140533117335968) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[2].lora_B['default_0'], 140533117334336) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[2].base_layer, 140581769892656) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[2].lora_dropout, 140533117335536) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[2].lora_dropout['default_0'], 140533117332272) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].ff.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].ff.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].ff.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].ff.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].ff.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].ff.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[11].ff.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
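Note: the predicates themselves are a small vocabulary. ID_MATCH (`___check_obj_id`) compares CPython `id()`, i.e. object identity, so the large integers are compile-time addresses; the two values that recur on every `.training` guard, 140591004393408 and 140591004393440, are plausibly just the `False`/`True` singletons. TYPE_MATCH compares `id(type(x))`, EQUALS_MATCH compares by value, DICT_LENGTH/LENGTH_CHECK check container sizes (here: `scaling` holds exactly one adapter and `merged_adapters` is empty), and the TENSOR_ALIASING pairs (printed twice each by the logger) assert that every LoRA layer still shares one `_active_adapter` object with `transformer_blocks[0].norm1.linear`. A rough Python reading, with `model` standing in for `L['self']`:

```python
def guards_hold(model) -> bool:
    """Rough Python equivalents of the guard kinds in this dump, for reading
    purposes only; the real checks run inside compiled C++ guard managers,
    and the integer ids are the compile-time id() values from the log."""
    lin = model.transformer_blocks[11].ff.net[2]      # a peft lora.Linear
    return (
        id(lin.lora_A) == 140533117335344             # ID_MATCH / ___check_obj_id
        and type(lin.scaling) is dict                 # TYPE_MATCH (140591004466944: presumably id(dict))
        and len(lin.scaling) == 1                     # DICT_LENGTH
        and lin.scaling["default_0"] == 1.0           # EQUALS_MATCH: value, not identity
        and not lin.merged_adapters                   # LENGTH_CHECK: still no merged adapters
        and "forward" not in lin.__dict__             # DICT_CONTAINS: no instance-level forward patch
        and model.transformer_blocks[0].norm1.linear._active_adapter
            is lin._active_adapter                    # TENSOR_ALIASING: one shared object
    )
```

If any of these checks fails on a later call, the cached graph is rejected and Dynamo either falls back to another cache entry or recompiles.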
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn, accessed_by=DictGetItemGuardAccessor(attn)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn, 140581769891504) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_k, 140533118125104) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_k.lora_A, 140533118126592) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_k.lora_A['default_0'], 140533118137728) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_k.lora_A['default_0'].weight, 140537314259040) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_k.lora_B, 140533118132976) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_k.lora_B['default_0'], 140533118135664) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_k.base_layer, 140581769891648) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_k.lora_dropout, 140533118136624) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_k.lora_dropout['default_0'], 140533118132928) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
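Note: two practical points about this stretch of the dump. First, the `[0/1]` compile id on every record marks this whole tree as the second compilation of frame 0 (the `[0/0]` tree earlier in the log was the first), so this dump documents a recompile. Second, the `EQUALS_MATCH: ... scaling['default_0'] == 1.0` on each LoRA layer means the adapter scale was specialized into the graph as a constant: change the scale at runtime and every one of these guards fails, forcing another compile. A self-contained toy that should reproduce the pattern (assuming torch >= 2.1 for `torch._logging.set_logs`):

```python
import torch

# Emit the same "guards"/"recompiles" artifacts as TORCH_LOGS="guards,recompiles".
torch._logging.set_logs(guards=True, recompiles=True)

class Scaled(torch.nn.Module):
    def __init__(self):
        super().__init__()
        self.linear = torch.nn.Linear(8, 8)
        self.scaling = {"default_0": 1.0}   # mirrors peft's per-adapter scaling dict

    def forward(self, x):
        return self.linear(x) * self.scaling["default_0"]

m = Scaled()
compiled = torch.compile(m)
x = torch.randn(2, 8)
compiled(x)                      # compile #1: guard tree should include EQUALS_MATCH ... == 1.0
m.scaling["default_0"] = 0.5     # change the "adapter scale"
compiled(x)                      # guard fails -> recompile, producing a [0/1] tree like this one
```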
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[11].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_q, 140533117375712) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_q.lora_A, 140533118134128) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_q.lora_A['default_0'], 140533118133936) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_q.lora_A['default_0'].weight, 140537314476816) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_q.lora_B, 140533118134176) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_q.lora_B['default_0'], 140533118133600) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_q.base_layer, 140581769891744) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_q.lora_dropout, 140533118126400) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_q.lora_dropout['default_0'], 140533118125728) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.use_dora['default_0'], 
accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[11].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].transformer_blocks[11].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_v, 140533117022752) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_v.lora_A, 140533117026256) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[11].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_v.lora_A['default_0'], 140533117291824) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_v.lora_A['default_0'].weight, 140537314256320) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_v.lora_B, 140533117011808) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_v.lora_B['default_0'], 140533117300464) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_v.base_layer, 140581769891840) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_v.lora_dropout, 140533117015504) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_v.lora_dropout['default_0'], 140533117019392) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] 
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[11].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[11].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.norm_k, 140581769891696) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.norm_k.weight, 140581765994416) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.norm_q, 140581769891600) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.norm_q.weight, 140581765994496) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out, 140581769892032) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[0], 140533117033808) # hidden_states = 
attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].attn.to_out[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[0].training, 140591004393408) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[0].lora_A, 140533117036400) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[0].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[0].lora_A['default_0'], 140533117743264) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | 
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[0].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[0].lora_A['default_0'].weight, 140537314250240) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[0].lora_B, 140533117739232) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[0].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[0].lora_B['default_0'], 140533117743552) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward 
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[0].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[0].base_layer, 140581769892080) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[0].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[0].lora_dropout, 140533117035872) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[0].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[0].lora_dropout['default_0'], 140533117037936) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[0].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.to_out[0].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].attn.to_out[0].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].attn.to_out[0].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.to_out[0].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # 
peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].attn.to_out[0].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[0].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.to_out[0].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[11].attn.to_out[0].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[0]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0]._active_adapter, 
accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[1], accessed_by=GetItemGuardAccessor(1) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[1], 140581769892128) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_k_proj, 140533117300512) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].attn.add_k_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_k_proj.training, 140591004393408) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_k_proj.lora_A, 140533117299168) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_k_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_k_proj.lora_A['default_0'], 140533117304064) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_k_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[11].attn.add_k_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_k_proj.lora_A['default_0'].weight, 140537314259840) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_k_proj.lora_B, 140533117299552) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_k_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_k_proj.lora_B['default_0'], 140533117292544) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_k_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_k_proj.base_layer, 140581769891888) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_k_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_k_proj.lora_dropout, 140533117300848) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_k_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_k_proj.lora_dropout['default_0'], 140533117290048) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[11].attn.add_k_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_k_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.add_k_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].attn.add_k_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].attn.add_k_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.add_k_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].attn.add_k_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_k_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.add_k_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[11].attn.add_k_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_k_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj, 
accessed_by=DictGetItemGuardAccessor(add_q_proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_q_proj, 140533117298112) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].attn.add_q_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_q_proj.training, 140591004393408) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_q_proj.lora_A, 140533117028000) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_q_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | 
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_q_proj.lora_A['default_0'], 140533117035248) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_q_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_q_proj.lora_A['default_0'].weight, 140537314247600) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_q_proj.lora_B, 140533117038272) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_q_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_q_proj.lora_B['default_0'], 140533117035008) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_q_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_q_proj.base_layer, 140581769891984) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_q_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_q_proj.lora_dropout, 140533117300320) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[11].attn.add_q_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_q_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_q_proj.lora_dropout['default_0'], 140533117298784) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_q_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.add_q_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].attn.add_q_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].attn.add_q_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.add_q_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].attn.add_q_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_q_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.add_q_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[11].attn.add_q_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_q_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj._backward_pre_hooks, 
accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_v_proj, 140533117289136) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].attn.add_v_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_v_proj.training, 140591004393408) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_v_proj.lora_A, 140533117293216) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 
in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_v_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_v_proj.lora_A['default_0'], 140533117290912) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_v_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_v_proj.lora_A['default_0'].weight, 140537314258480) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[11].attn.add_v_proj.lora_B, 140533117300560) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_v_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_v_proj.lora_B['default_0'], 140533117297152) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_v_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_v_proj.base_layer, 140581769891936) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_v_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_v_proj.lora_dropout, 140533117297920) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_v_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_v_proj.lora_dropout['default_0'], 140533117291200) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_v_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: 
___check_type_id(L['self'].transformer_blocks[11].attn.add_v_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].attn.add_v_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].attn.add_v_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.add_v_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].attn.add_v_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_v_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.add_v_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[11].attn.add_v_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_v_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_add_out, 140533116940016) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].attn.to_add_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[11].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_add_out.training, 140591004393408) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_add_out.lora_A, 140533117176464) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_add_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_add_out.lora_A['default_0'], 140533117186352) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_add_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_add_out.lora_A['default_0'].weight, 140537314150976) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_add_out.lora_B, 140533117182464) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_add_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_add_out.lora_B['default_0'], 140533117184864) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_add_out.lora_B['default_0'].training, 140591004393408) # lora_B = 
self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_add_out.base_layer, 140581769892176) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_add_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_add_out.lora_dropout, 140533117177328) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_add_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_add_out.lora_dropout['default_0'], 140533117175936) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | 
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_add_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.to_add_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].attn.to_add_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].attn.to_add_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.to_add_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].attn.to_add_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_add_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.to_add_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[11].attn.to_add_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_add_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager:
source=L['self'].transformer_blocks[11].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.norm_added_k, 140581769892320) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.norm_added_k.weight, 140581765994256) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | 
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.norm_added_q, 140581769892224) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.norm_added_q.weight, 140581765994336) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.processor, 140581769891456) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1, 
accessed_by=DictGetItemGuardAccessor(norm1) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1, 140581769891024) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.norm, 140581769891168) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 
+ scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.silu, 140581769891072) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.linear, 140533116934160) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].norm1.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.linear.lora_A, 140533116933584) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.linear.lora_A['default_0'], 140533116931904) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.linear.lora_A['default_0'].weight, 140537314486576) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[11].norm1.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.linear.lora_B, 140533116930752) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.linear.lora_B['default_0'], 140533116931760) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.linear.base_layer, 140581769891120) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[11].norm1.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.linear.lora_dropout, 140533116933824) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.linear.lora_dropout['default_0'], 140533116934352) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].norm1.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].norm1.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].norm1.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].norm1.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].norm1.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].norm1.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[11].norm1.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm2, accessed_by=DictGetItemGuardAccessor(norm2) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | |
| | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm2, 140581769892368) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm2.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context, 140581769892704) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net, 140581769892848) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | 
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[11].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0], 140581769892800) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].proj, 140533117335200) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].ff_context.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_A, 140533117323248) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_A['default_0'], 140533117324208) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_A['default_0'].weight, 140537314164016) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_B, 140533117333184) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_B['default_0'], 140533117323968) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].proj.base_layer, 140581769892896) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_dropout, 140533117324064) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_dropout['default_0'], 140533117334528) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].ff_context.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].ff_context.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].ff_context.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].ff_context.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].ff_context.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].ff_context.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[11].ff_context.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[1], 140581769892992) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[2], 140533117332992) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].ff_context.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[2].lora_A, 140533117323872) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[2].lora_A['default_0'], 140533117333904) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[2].lora_A['default_0'].weight, 140537314161136) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[2].lora_B, 140533117333040) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[2].lora_B['default_0'], 140533117335008) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[2].base_layer, 140581769893040) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[2].lora_dropout, 140533117324880) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[2].lora_dropout['default_0'], 140533117333520) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].ff_context.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].ff_context.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].ff_context.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].ff_context.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].ff_context.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].ff_context.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[11].ff_context.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
accessed_by=DictGetItemGuardAccessor(norm1_context) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context, 140581769891216) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.norm, 140581769891408) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[11].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.silu, 140581769891312) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.linear, 140533116931376) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].norm1_context.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.linear.lora_A, 140533116931184) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.linear.lora_A['default_0'], 140533116943520) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[11].norm1_context.linear.lora_A['default_0'].weight, 140537314479296) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.linear.lora_B, 140533116929696) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.linear.lora_B['default_0'], 140533116929360) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.linear.base_layer, 140581769891360) # result = self.base_layer(x, *args, **kwargs) # 
peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.linear.lora_dropout, 140533116931424) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.linear.lora_dropout['default_0'], 140533116930704) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].norm1_context.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].norm1_context.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].norm1_context.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].norm1_context.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].norm1_context.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].norm1_context.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[11].norm1_context.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm2_context, 140581769892416) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
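That closes the guard subtree for transformer_blocks[11]; the transformer_blocks[12] subtree below repeats the identical per-module pattern. One note on the record tags: the [0/1] in every line here marks the second compiled version of frame 0, i.e. this guard set was emitted on a recompilation. If you need to regenerate a dump like this one, the guard artifact logger can be switched on with TORCH_LOGS or torch._logging.set_logs; a minimal sketch, assuming a recent PyTorch 2.x build, with model and example_inputs as hypothetical stand-ins for the LoRA-patched Flux transformer and its inputs:

    # Minimal sketch for reproducing a TREE_GUARD_MANAGER dump like the one above.
    # `model` and `example_inputs` are placeholders, not names from this log.
    import logging
    import torch

    # Same effect as launching the process with TORCH_LOGS="guards":
    torch._logging.set_logs(dynamo=logging.INFO, guards=True)

    compiled = torch.compile(model)
    compiled(example_inputs)  # the guard tree prints once the frame is compiled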
| | | | | +- GuardManager: source=L['self'].transformer_blocks[12], accessed_by=GetItemGuardAccessor(12)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12], 140581769890688) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff, accessed_by=DictGetItemGuardAccessor(ff)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff, 140581769894336) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net, 140581769894576) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[12].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0], 140581769894528) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].proj, 140533115902160) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].ff.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].proj.lora_A, 140533117671872) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].proj.lora_A['default_0'], 140533117672784) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].proj.lora_A['default_0'].weight, 140537315946592) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].proj.lora_B, 140533117671344) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].proj.lora_B['default_0'], 140533117672256) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].proj.base_layer, 140581769894624) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].proj.lora_dropout, 140533115902976) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].proj.lora_dropout['default_0'], 140533115909696) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].ff.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].ff.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].ff.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].ff.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].ff.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].ff.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[12].ff.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
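The guard sources under each LoRA-wrapped module all point at the same few statements of peft/tuners/lora/layer.py. Reassembled from the lines the guard comments quote (557-568), the guarded hot path looks roughly like the sketch below; this is a paraphrase for orientation, with the loop and control flow inferred, not a verbatim copy of the PEFT source:

    # Paraphrase of peft/tuners/lora/layer.py:557-568, reassembled from the
    # statements cited in the guard comments above; not verbatim PEFT code.
    def forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)      # layer.py:557
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():  # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]          # layer.py:562
            lora_B = self.lora_B[active_adapter]          # layer.py:563
            dropout = self.lora_dropout[active_adapter]   # layer.py:564
            scaling = self.scaling[active_adapter]        # layer.py:565
            x = x.to(lora_A.weight.dtype)                 # layer.py:566
            if not self.use_dora[active_adapter]:         # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Every dict access in that body is what Dynamo guards on: the ModuleDict lookups become ID_MATCH guards on the container and on its 'default_0' entry, scaling[active_adapter] becomes the TYPE_MATCH/DICT_LENGTH/EQUALS_MATCH triple seen above, and use_dora[active_adapter] becomes an ID_MATCH on its boolean value, all repeated per wrapped Linear in every block. The EQUALS_MATCH pinning scaling['default_0'] == 1.0 also means that changing the adapter scale at runtime would invalidate this guard set and force another recompile.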
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[1], 140581769894672) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[2], 140533117667408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].ff.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[2].lora_A, 140533117670336) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[2].lora_A['default_0'], 140533117667120) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[2].lora_A['default_0'].weight, 140537315944032) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[2].lora_B, 140533117668464) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[2].lora_B['default_0'], 140533117672832) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[2].base_layer, 140581769894720) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[2].lora_dropout, 140533117670048) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[2].lora_dropout['default_0'], 140533117669712) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].ff.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].ff.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].ff.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].ff.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].ff.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].ff.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[12].ff.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
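That closes the feed-forward subtree for this block; the attention projections below repeat the same per-adapter pattern once more. If the adapter does not need to stay hot-swappable, one commonly suggested way to shrink this guard surface is to merge the LoRA deltas into the base weights before compiling, so the traced forward never touches lora_A, lora_B, scaling, or use_dora at all. A sketch, with peft_model and pipe as hypothetical handles to a PEFT-wrapped module and a diffusers pipeline with a loaded LoRA:

    # Two sketched options; `peft_model` and `pipe` are placeholder names.
    import torch

    # (a) PEFT: fold the adapter into the base weights, then compile the result.
    merged = peft_model.merge_and_unload()
    compiled = torch.compile(merged)

    # (b) diffusers: fuse the loaded LoRA into the transformer, then compile it.
    pipe.fuse_lora()
    pipe.transformer = torch.compile(pipe.transformer)

After merging or fusing, the per-module adapter dictionaries are no longer read in forward, so the corresponding ID_MATCH/TYPE_MATCH/DICT_LENGTH/EQUALS_MATCH guards should not be installed at all.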
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn, 140581769893568) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_k, 140533117444560) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_k.lora_A, 140533117450272) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_k.lora_A['default_0'], 140533117447632) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_k.lora_A['default_0'].weight, 140537316121376) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_k.lora_B, 140533117450608) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_k.lora_B['default_0'], 140533117449888) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_k.base_layer, 140581769893712) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_k.lora_dropout, 140533117443216) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148]
[0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_k.lora_dropout['default_0'], 140533117444272) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[12].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q, 140533117441776) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q.lora_A, 140533117450992) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 
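The subtree above pins down every attribute that PEFT's LoRA Linear wrapper around attn.to_k reads on its hot path. As a reading aid, here is a minimal sketch of that forward path, reconstructed only from the peft/tuners/lora/layer.py lines quoted in the guard comments (557-568); it is not the verbatim peft implementation, and it hard-codes the single guarded adapter 'default_0' with scaling == 1.0 and use_dora == False, which is exactly what the DICT_LENGTH, EQUALS_MATCH, and ID_MATCH guards assert.

# Sketch only: reconstructed from the source lines quoted in the guards,
# not copied from peft.
import torch
import torch.nn as nn

class LoraLinearSketch(nn.Module):
    def __init__(self, base_layer: nn.Linear, r: int = 16, scaling: float = 1.0):
        super().__init__()
        self.base_layer = base_layer
        # peft keeps per-adapter containers; the guards check exactly one
        # adapter named 'default_0' (DICT_LENGTH == 1) in each of these.
        self.lora_A = nn.ModuleDict({"default_0": nn.Linear(base_layer.in_features, r, bias=False)})
        self.lora_B = nn.ModuleDict({"default_0": nn.Linear(r, base_layer.out_features, bias=False)})
        self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})
        self.scaling = {"default_0": scaling}      # guarded == 1.0
        self.use_dora = {"default_0": False}       # guarded False

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        result = self.base_layer(x)                      # layer.py:557
        for active_adapter in ("default_0",):            # the single guarded adapter
            lora_A = self.lora_A[active_adapter]         # layer.py:562
            lora_B = self.lora_B[active_adapter]         # layer.py:563
            dropout = self.lora_dropout[active_adapter]  # layer.py:564
            scaling = self.scaling[active_adapter]       # layer.py:565
            x = x.to(lora_A.weight.dtype)                # layer.py:566
            # use_dora['default_0'] is guarded False, so only the plain
            # low-rank branch runs (layer.py:568):
            result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

layer = LoraLinearSketch(nn.Linear(64, 64))
out = layer(torch.randn(2, 8, 64))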
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q, 140533117441776) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q.lora_A, 140533117450992) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q.lora_A['default_0'], 140533117437264) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q.lora_A['default_0'].weight, 140537316124176) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q.lora_B, 140533117449840) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q.lora_B['default_0'], 140533117443360) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q.base_layer, 140581769893808) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q.lora_dropout, 140533117451856) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q.lora_dropout['default_0'], 140533117437552) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[12].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
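The to_q and to_k guards quote the projection calls they specialize (attention_processor.py:1716-1718 and 1727-1729). Below is a runnable sketch of that guarded call sequence with a stand-in attn namespace; the reshape and scaled-dot-product steps between the projections are omitted because no guard here references them, and the to_out[0] guards that appear further below correspond to the output projection quoted from line 1776, itself a LoRA-wrapped Linear.

# Sketch of the guarded QKV call order, assembled from the quoted lines;
# the `attn` namespace here is a stand-in, not the diffusers Attention class.
import torch
import torch.nn as nn
from types import SimpleNamespace

def qkv_projection_sketch(attn, hidden_states: torch.Tensor):
    query = attn.to_q(hidden_states)   # attention_processor.py:1716
    key = attn.to_k(hidden_states)     # attention_processor.py:1717
    value = attn.to_v(hidden_states)   # attention_processor.py:1718
    if attn.norm_q is not None:        # attention_processor.py:1727
        query = attn.norm_q(query)
    if attn.norm_k is not None:        # attention_processor.py:1729
        key = attn.norm_k(key)
    return query, key, value

dim = 8
attn = SimpleNamespace(
    to_q=nn.Linear(dim, dim), to_k=nn.Linear(dim, dim), to_v=nn.Linear(dim, dim),
    norm_q=None, norm_k=None,
)
q, k, v = qkv_projection_sketch(attn, torch.randn(2, 4, dim))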
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_v, 140533117450416) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_v.lora_A, 140533117442304) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_v.lora_A['default_0'], 140533117511200) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_v.lora_A['default_0'].weight, 140537316126496) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_v.lora_B, 140533117449312) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_v.lora_B['default_0'], 140533117513072) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_v.base_layer, 140581769893904) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_v.lora_dropout, 140533117449552) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_v.lora_dropout['default_0'], 140533117450656) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[12].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
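Two guard flavors close out each LoRA submodule: ID_MATCH / ___check_obj_id compares an attribute against a recorded object address, and TENSOR_ALIASING (applied here to the shared _active_adapter attribute rather than to a tensor) asserts that two sources still name one and the same object. A plausible reconstruction of what these checks evaluate, assuming both lower to CPython identity tests as their printed form suggests (the function bodies below are illustrative, not the torch source):

def check_obj_id(obj: object, expected_id: int) -> bool:
    # ID_MATCH: true only if obj is literally the object recorded at compile
    # time, e.g. the True/False singletons guarding every .training flag above.
    return id(obj) == expected_id

def check_aliasing(a: object, b: object) -> bool:
    # TENSOR_ALIASING: both sources must still resolve to one shared object.
    return a is b

shared_active_adapter = ["default_0"]   # hypothetical shared list, as in peft
to_q_ref = shared_active_adapter
to_v_ref = shared_active_adapter
assert check_obj_id(False, id(False))
assert check_aliasing(to_q_ref, to_v_ref)  # rebinding either attribute would trigger recompilation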
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.norm_k, 140581769893760) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.norm_k.weight, 140581765995376) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.norm_q, 140581769893664) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.norm_q.weight, 140581765995696) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
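The norm_q/norm_k guards fix eps == 1e-06 and pin the affine weight, quoting diffusers/src/diffusers/models/normalization.py:428-430. A self-contained sketch of an RMSNorm forward consistent with those quoted lines follows; only the rsqrt and weight lines appear in the guard comments, so the variance computation here is an assumption about the surrounding code:

# Sketch of an RMSNorm forward matching the guarded lines; not the verbatim
# diffusers implementation.
import torch
import torch.nn as nn

class RMSNormSketch(nn.Module):
    def __init__(self, dim: int, eps: float = 1e-6, elementwise_affine: bool = True):
        super().__init__()
        self.eps = eps  # guarded: EQUALS_MATCH eps == 1e-06
        self.weight = nn.Parameter(torch.ones(dim)) if elementwise_affine else None

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        # Assumed: RMS statistics in float32 over the last dimension.
        variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + self.eps)  # normalization.py:428
        if self.weight is not None:                                       # normalization.py:430
            hidden_states = hidden_states.to(self.weight.dtype) * self.weight
        return hidden_states

norm = RMSNormSketch(64)
y = norm(torch.randn(2, 8, 64))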
accessed_by=DictGetItemGuardAccessor(to_out) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out, 140581769894096) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[0], 140533116172032) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].attn.to_out[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[0].training, 140591004393408) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[0].lora_A, 140533116170592) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[0].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[0].lora_A['default_0'], 140533116992672) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[0].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[0].lora_A['default_0'].weight, 140537315940192) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 
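
Annotation: every provenance comment in the records above points into peft/tuners/lora/layer.py:557-568, i.e. PEFT's LoRA Linear.forward. Below is a minimal sketch of that control flow, reconstructed only from the cited lines (the class name, constructor, and active_adapters handling are invented for illustration; this is not PEFT's actual source). It shows why the tree looks the way it does: every Python-level attribute the traced forward touches becomes a guard.

    import torch
    import torch.nn as nn

    class LoraLinearSketch(nn.Module):
        # Stand-in for peft.tuners.lora.layer.Linear, keeping only the
        # attributes the guard tree checks: base_layer, lora_A/lora_B,
        # lora_dropout, scaling, use_dora, merged_adapters.
        def __init__(self, base: nn.Linear, r: int = 4, adapter: str = "default_0"):
            super().__init__()
            self.base_layer = base
            self.lora_A = nn.ModuleDict({adapter: nn.Linear(base.in_features, r, bias=False)})
            self.lora_B = nn.ModuleDict({adapter: nn.Linear(r, base.out_features, bias=False)})
            self.lora_dropout = nn.ModuleDict({adapter: nn.Identity()})  # p=0 -> Identity
            self.scaling = {adapter: 1.0}      # plain dict: TYPE_MATCH/DICT_LENGTH/EQUALS_MATCH
            self.use_dora = {adapter: False}   # plain dict: ID_MATCH on the False value
            self.merged_adapters = []          # empty list: the LENGTH_CHECK guard
            self.active_adapters = [adapter]

        def forward(self, x):
            result = self.base_layer(x)                       # layer.py:557
            for active_adapter in self.active_adapters:
                if active_adapter not in self.lora_A.keys():  # layer.py:560
                    continue
                lora_A = self.lora_A[active_adapter]          # layer.py:562
                lora_B = self.lora_B[active_adapter]          # layer.py:563
                dropout = self.lora_dropout[active_adapter]   # layer.py:564
                scaling = self.scaling[active_adapter]        # layer.py:565
                x = x.to(lora_A.weight.dtype)                 # layer.py:566
                if not self.use_dora[active_adapter]:         # layer.py:568
                    result = result + lora_B(lora_A(dropout(x))) * scaling
            return result

Each module lookup (base_layer, lora_A['default_0'], ...) surfaces above as an ID_MATCH, and the weight reads as ID_MATCH on the parameter objects, so swapping any submodule or parameter for a new object invalidates this cache entry.
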
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[0].lora_B, 140533116162480) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[0].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[0].lora_B['default_0'], 140533116990800) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[0].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[0].base_layer, 140581769894144) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[12].attn.to_out[0].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[0].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[0].lora_dropout, 140533116168192) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[0].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[0].lora_dropout['default_0'], 140533116171120) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[0].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].scaling, 
accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.to_out[0].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].attn.to_out[0].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].attn.to_out[0].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.to_out[0].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].attn.to_out[0].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[0].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: 
___check_type_id(L['self'].transformer_blocks[12].attn.to_out[0].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[12].attn.to_out[0].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[0]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[1], accessed_by=GetItemGuardAccessor(1) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[1], 140581769894192) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 
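
Annotation: the EQUALS_MATCH above pins scaling['default_0'] == 1.0. The float read out of that plain dict is burned into the trace as a constant, so changing the LoRA scale after compilation fails the guard and forces a recompile (these records are already tagged [0/1], i.e. the second guard set built for frame 0). A self-contained toy reproduction of the mechanism, not Flux or PEFT code:

    import torch

    class Toy(torch.nn.Module):
        def __init__(self):
            super().__init__()
            self.linear = torch.nn.Linear(8, 8)
            self.scaling = {"default_0": 1.0}  # mirrors PEFT's plain scaling dict

        def forward(self, x):
            # Reading a float from a non-Module dict specializes the trace,
            # guarded by TYPE_MATCH + DICT_LENGTH + EQUALS_MATCH as above.
            return self.linear(x) * self.scaling["default_0"]

    toy = Toy()
    compiled = torch.compile(toy)
    x = torch.randn(2, 8)
    compiled(x)                     # first call: trace, install guards
    toy.scaling["default_0"] = 0.8  # EQUALS_MATCH(... == 1.0) now fails
    compiled(x)                     # so this call builds a new cache entry

Running such a script with TORCH_LOGS="recompiles" reports the failed guard as the recompile reason instead of dumping a full tree like this one.
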
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_k_proj, 140533117514032) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].attn.add_k_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_k_proj.training, 140591004393408) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_k_proj.lora_A, 140533117508032) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[12].attn.add_k_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_k_proj.lora_A['default_0'], 140533117504000) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_k_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_k_proj.lora_A['default_0'].weight, 140537316122896) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_k_proj.lora_B, 140533117502512) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 
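
Annotation: each record in this dump has one fixed shape: logger prefix, tree depth drawn with pipe characters, then "+- KIND: expression # traced-source-line # file:line in function". A small helper for slicing a saved dump into those fields; the regex and the guards.log filename are fitted to this output format only and are not a stable Dynamo interface:

    import re
    from collections import Counter

    def parse_guard(line):
        # Matches only terminal guard records (ID_MATCH, EQUALS_MATCH, ...),
        # skipping the mixed-case "GuardManager:" structural nodes.
        m = re.search(r"\+- ([A-Z_]+): (.*)", line)
        if not m:
            return None
        kind, rest = m.group(1), m.group(2)
        parts = rest.split(" # ")  # expression, traced source line, origin
        origin = parts[-1] if len(parts) >= 3 else None
        return kind, parts[0], origin

    kinds = Counter()
    with open("guards.log") as f:  # hypothetical: a saved TORCH_LOGS="guards" run
        for line in f:
            parsed = parse_guard(line)
            if parsed:
                kinds[parsed[0]] += 1
    print(kinds.most_common())     # ID_MATCH dominates a tree like this one

Counting kinds this way makes it obvious where the guard budget goes: almost all of it is object-identity checks on module attributes rather than tensor checks.
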
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_k_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_k_proj.lora_B['default_0'], 140533117503280) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_k_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_k_proj.base_layer, 140581769893952) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_k_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_k_proj.lora_dropout, 
140533117513120) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_k_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_k_proj.lora_dropout['default_0'], 140533117511488) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_k_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.add_k_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].attn.add_k_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 
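
Annotation: the ___check_obj_id guards on .training, use_dora['default_0'], and _disable_adapters compare id() of the attribute against a constant recorded at trace time. Only two such constants recur throughout this dump, 140591004393408 and 140591004393440, which are plausibly the process-specific ids of the CPython True and False singletons: the False-valued flags (use_dora, _disable_adapters) and the base modules' .training all guard against ...440, while the injected LoRA wrappers' .training guards against ...408, suggesting the wrappers were left in train mode.

    # Booleans are singletons in CPython, so an ID_MATCH on a bool attribute
    # reduces to comparing against id(True) or id(False) for this process:
    print(id(True), id(False))

If that reading is right, toggling train()/eval() after compilation flips these ids and is itself a recompile trigger.
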
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].attn.add_k_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.add_k_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].attn.add_k_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_k_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.add_k_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[12].attn.add_k_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_k_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 
in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_q_proj, 140533117514800) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].attn.add_q_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_q_proj.training, 140591004393408) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 
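
Annotation: the subtree that just closed for add_k_proj (a few dozen records from lora_A down to _active_adapter) repeats in identical shape for add_q_proj below, and by the visible pattern for every LoRA-wrapped projection of every transformer block. Guards are evaluated on every compiled call, so a tree of this size is a recurring per-step cost, not just compile-time noise. One way to shrink it is to fold the adapter into the base weights before compiling, so Dynamo sees plain Linear modules. Sketch under stated assumptions: pipe is a loaded diffusers Flux pipeline with this LoRA attached; fuse_lora and unload_lora_weights are diffusers pipeline APIs, but semantics vary by release, and the LoRA scale is frozen once fused:

    import torch

    pipe.fuse_lora(lora_scale=1.0)   # fold B @ A * scaling into base weights
    pipe.unload_lora_weights()       # drop the PEFT wrapper modules
    pipe.transformer = torch.compile(pipe.transformer)

After this, the lora_A/lora_B/scaling/use_dora guards above disappear because the attributes themselves are gone; the trade-off is losing runtime control of the adapter scale.
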
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_q_proj.lora_A, 140533117507888) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_q_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_q_proj.lora_A['default_0'], 140533116161088) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_q_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_q_proj.lora_A['default_0'].weight, 140537315943632) # x = 
x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_q_proj.lora_B, 140533117507264) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_q_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_q_proj.lora_B['default_0'], 140533116162144) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_q_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_q_proj.base_layer, 140581769894048) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[12].attn.add_q_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_q_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_q_proj.lora_dropout, 140533117511392) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_q_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_q_proj.lora_dropout['default_0'], 140533117501888) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_q_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # 
peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.add_q_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].attn.add_q_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].attn.add_q_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.add_q_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].attn.add_q_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_q_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.merged_adapters, 
accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.add_q_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[12].attn.add_q_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_q_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_v_proj, 140533117503904) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 
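
Annotation: TENSOR_ALIASING is, despite the name, a plain object-identity assertion between two attribute paths; here it pins add_q_proj._active_adapter to the very list object seen on transformer_blocks[0].norm1.linear at trace time (the record appears twice back to back, presumably emitted once per access path). Rebinding the attribute on any one layer to an equal but distinct object would fail it:

    # Minimal illustration of what the guard asserts (plain Python, no torch):
    class Wrapper:
        def __init__(self, active):
            self._active_adapter = active

    shared = ["default_0"]
    a, b = Wrapper(shared), Wrapper(shared)
    assert a._active_adapter is b._active_adapter        # what TENSOR_ALIASING checks
    b._active_adapter = list(shared)                     # equal value, new object
    assert a._active_adapter is not b._active_adapter    # guard would now fail

Mutating the shared list in place keeps this guard satisfied; reassigning the attribute per layer does not.
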
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].attn.add_v_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_v_proj.training, 140591004393408) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_v_proj.lora_A, 140533117508176) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_v_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_v_proj.lora_A['default_0'], 140533117513408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[12].attn.add_v_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_v_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_v_proj.lora_A['default_0'].weight, 140537316118416) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_v_proj.lora_B, 140533117510336) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_v_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_v_proj.lora_B['default_0'], 140533117509088) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_v_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_v_proj.base_layer, 140581769894000) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_v_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_v_proj.lora_dropout, 140533117513552) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_v_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[12].attn.add_v_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_v_proj.lora_dropout['default_0'], 140533117507552) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_v_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.add_v_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].attn.add_v_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].attn.add_v_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.add_v_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].attn.add_v_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_v_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.add_v_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[12].attn.add_v_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_v_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.add_v_proj._active_adapter # return 
self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_add_out, 140533116988064) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].attn.to_add_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_add_out.training, 140591004393408) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_add_out.lora_A, 140533115900240) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | 
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_add_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_add_out.lora_A['default_0'], 140533115899472) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_add_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_add_out.lora_A['default_0'].weight, 140537315945952) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_add_out.lora_B, 140533115899904) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) 
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_add_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_add_out.lora_B['default_0'], 140533115899280) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_add_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_add_out.base_layer, 140581769894240) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_add_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[12].attn.to_add_out.lora_dropout, 140533115897504) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_add_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_add_out.lora_dropout['default_0'], 140533116987392) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_add_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.to_add_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].attn.to_add_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.scaling['default_0'], 
accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].attn.to_add_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.to_add_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].attn.to_add_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_add_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.to_add_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[12].attn.to_add_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[12].attn.to_add_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.norm_added_k, 140581769894384) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_k.eps, 
accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.norm_added_k.weight, 140581773230944) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.norm_added_q, 140581769894288) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_q.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.norm_added_q.weight, 140581785355984) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.processor, 140581769893520) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1, accessed_by=DictGetItemGuardAccessor(norm1) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1, 140581769893088) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.norm, 140581769893232) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.silu, 140581769893136) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[12].norm1.linear, 140533117335056) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].norm1.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.linear.lora_A, 140533117323392) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.linear.lora_A['default_0'], 140533117710784) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | 
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.linear.lora_A['default_0'].weight, 140537314150656) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.linear.lora_B, 140533117324928) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.linear.lora_B['default_0'], 140533117712752) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.linear.base_layer, 140581769893184) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.linear.lora_dropout, 140533117329344) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # 
peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.linear.lora_dropout['default_0'], 140533117334384) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].norm1.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].norm1.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].norm1.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].norm1.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: 
len(L['self'].transformer_blocks[12].norm1.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].norm1.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[12].norm1.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: 
L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm2, accessed_by=DictGetItemGuardAccessor(norm2) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm2, 140581769894432) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm2.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context, 140581769894768) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[12].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net, 140581769894912) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[12].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0], 140581769894864) # for module 
in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].proj, 140533117668992) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].ff_context.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_A, 140533117670816) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_A['default_0'], 140533117669424) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_A['default_0'].weight, 140537315938112) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_B, 140533117669856) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_B['default_0'], 140533117669232) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | 
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].proj.base_layer, 140581769894960) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_dropout, 140533117666736) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_dropout['default_0'], 140533117668848) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].ff_context.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].ff_context.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].ff_context.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].ff_context.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].ff_context.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].ff_context.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[12].ff_context.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[1], 140581769895056) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[2], 140533117671824) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[12].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].ff_context.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[2].lora_A, 140533117675952) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[2].lora_A['default_0'], 140533116418032) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[2].lora_A['default_0'].weight, 140537315815920) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[2].lora_B, 140533116419712) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[2].lora_B['default_0'], 140533116418896) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[12].ff_context.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[2].base_layer, 140581769895104) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[2].lora_dropout, 140533117680224) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 
in forward

[0/1] TREE_GUARD_MANAGER (continued). Shared record prefix, elided from every entry below: "V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards]". One line per guarded source; bare GuardManager nodes (__dict__, _modules, _parameters and the *_hooks dicts, which carry no guard of their own) and the accessor kinds (DictGetItemGuardAccessor, GetItemGuardAccessor, GetGenericDictGuardAccessor) are folded into the attribute they lead to. Every "DICT_CONTAINS: not 'forward'" entry guards ___dict_contains('forward', <module>.__dict__) and originates from forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl.

L['self'].transformer_blocks[12].ff_context.net[2]  (PEFT lora.Linear, continued)
  .lora_dropout['default_0']     ID_MATCH 140533117674272; .training ID_MATCH 140591004393408  # dropout = self.lora_dropout[active_adapter]  # peft/tuners/lora/layer.py:564 in forward
  .scaling                       TYPE_MATCH 140591004466944, DICT_LENGTH == 1; ['default_0'] EQUALS_MATCH == 1.0  # scaling = self.scaling[active_adapter]  # peft/tuners/lora/layer.py:565 in forward
  .use_dora                      TYPE_MATCH 140591004466944, DICT_LENGTH == 1; ['default_0'] ID_MATCH 140591004393440  # if not self.use_dora[active_adapter]:  # peft/tuners/lora/layer.py:568 in forward
  ._forward_hooks / ._backward_hooks   GuardManager only
  .merged_adapters               TYPE_MATCH 140591004458752; LENGTH_CHECK: empty  # return bool(self.merged_adapters)  # peft/tuners/tuners_utils.py:506 in merged
  ._disable_adapters             ID_MATCH 140591004393440  # return self._disable_adapters  # peft/tuners/tuners_utils.py:511 in disable_adapters
  ._forward_pre_hooks / ._backward_pre_hooks   GuardManager only
  ._active_adapter               TENSOR_ALIASING: is L['self'].transformer_blocks[0].norm1.linear._active_adapter (logged twice)  # return self._active_adapter  # peft/tuners/tuners_utils.py:516 in active_adapter

L['self'].transformer_blocks[12].ff_context  (continued)
  ._forward_hooks / ._backward_hooks / ._forward_pre_hooks / ._backward_pre_hooks   GuardManager only
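Read as plain Python, the checks above are cheap identity tests. The sketch below paraphrases ID_MATCH and TENSOR_ALIASING; it is illustrative only, and check_obj_id / check_aliasing are stand-in names, not Dynamo's actual helpers (the real ___check_obj_id is a builtin installed by the guard manager).

# Plain-Python paraphrase of the two guard kinds above (illustrative only).
def check_obj_id(obj: object, expected_id: int) -> bool:
    # ID_MATCH pins the exact object: id() equality, never __eq__.
    return id(obj) == expected_id

def check_aliasing(a: object, b: object) -> bool:
    # TENSOR_ALIASING requires two sources to resolve to one shared object.
    # PEFT stores a single _active_adapter list per model, which is why every
    # layer's guard aliases transformer_blocks[0].norm1.linear._active_adapter.
    return a is b

adapters = ["default_0"]
assert check_obj_id(adapters, id(adapters))
assert check_aliasing(adapters, adapters)

The two ids that recur on every .training flag (140591004393408 and 140591004393440) are consistent with this reading: each module's training bool is guarded by pinning it to one of the True/False singletons.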
L['self'].transformer_blocks[12].norm1_context
  (module)                       ID_MATCH 140581769893280; DICT_CONTAINS: not 'forward'; .training ID_MATCH 140591004393440  # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context(  # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
  .emb                           ID_MATCH 140591004478624  # if self.emb is not None:  # diffusers/src/diffusers/models/normalization.py:135 in forward
  .norm                          ID_MATCH 140581769893472; .training ID_MATCH 140591004393440  # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # diffusers/src/diffusers/models/normalization.py:139 in forward
  .silu                          ID_MATCH 140581769893376; .training ID_MATCH 140591004393440  # emb = self.linear(self.silu(emb))  # diffusers/src/diffusers/models/normalization.py:137 in forward
  .linear  (PEFT lora.Linear)    ID_MATCH 140533117712464; DICT_CONTAINS: not 'forward'; .training ID_MATCH 140591004393408  # emb = self.linear(self.silu(emb))  # diffusers/src/diffusers/models/normalization.py:137 in forward
    .lora_A                      ID_MATCH 140533117703440; .training ID_MATCH 140591004393408  # if active_adapter not in self.lora_A.keys():  # peft/tuners/lora/layer.py:560 in forward
    .lora_A['default_0']         ID_MATCH 140533117107456; .training ID_MATCH 140591004393408  # lora_A = self.lora_A[active_adapter]  # peft/tuners/lora/layer.py:562 in forward
    .lora_A['default_0'].weight  ID_MATCH 140537316114896  # x = x.to(lora_A.weight.dtype)  # peft/tuners/lora/layer.py:566 in forward
    .lora_B                      ID_MATCH 140533117097712; .training ID_MATCH 140591004393408  # lora_B = self.lora_B[active_adapter]  # peft/tuners/lora/layer.py:563 in forward
    .lora_B['default_0']         ID_MATCH 140533117106064; .training ID_MATCH 140591004393408  # lora_B = self.lora_B[active_adapter]  # peft/tuners/lora/layer.py:563 in forward
    .base_layer                  ID_MATCH 140581769893424; .training ID_MATCH 140591004393440  # result = self.base_layer(x, *args, **kwargs)  # peft/tuners/lora/layer.py:557 in forward
    .lora_dropout                ID_MATCH 140533117714240; .training ID_MATCH 140591004393408  # dropout = self.lora_dropout[active_adapter]  # peft/tuners/lora/layer.py:564 in forward
    .lora_dropout['default_0']   ID_MATCH 140533117710592; .training ID_MATCH 140591004393408  # dropout = self.lora_dropout[active_adapter]  # peft/tuners/lora/layer.py:564 in forward
    .scaling                     TYPE_MATCH 140591004466944, DICT_LENGTH == 1; ['default_0'] EQUALS_MATCH == 1.0  # scaling = self.scaling[active_adapter]  # peft/tuners/lora/layer.py:565 in forward
    .use_dora                    TYPE_MATCH 140591004466944, DICT_LENGTH == 1; ['default_0'] ID_MATCH 140591004393440  # if not self.use_dora[active_adapter]:  # peft/tuners/lora/layer.py:568 in forward
    ._forward_hooks / ._backward_hooks   GuardManager only
    .merged_adapters             TYPE_MATCH 140591004458752; LENGTH_CHECK: empty  # return bool(self.merged_adapters)  # peft/tuners/tuners_utils.py:506 in merged
    ._disable_adapters           ID_MATCH 140591004393440  # return self._disable_adapters  # peft/tuners/tuners_utils.py:511 in disable_adapters
    ._forward_pre_hooks / ._backward_pre_hooks   GuardManager only
    ._active_adapter             TENSOR_ALIASING: is L['self'].transformer_blocks[0].norm1.linear._active_adapter (logged twice)  # return self._active_adapter  # peft/tuners/tuners_utils.py:516 in active_adapter
  ._forward_hooks / ._backward_hooks / ._forward_pre_hooks / ._backward_pre_hooks   GuardManager only
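The EQUALS_MATCH on scaling['default_0'] == 1.0 is worth noticing: the compiled graph baked in the current LoRA scale, and peft rescales exactly this dict when a caller asks for a different lora_scale (diffusers' scale_lora_layers utility does so through the layer's scale_layer method). Changing the scale therefore fails the guard and triggers a recompile. A minimal repro sketch, assuming peft and PyTorch 2.x are installed; the toy Sequential and the "default" adapter name are illustrative, not taken from this log:

import torch
import torch.nn as nn
from peft import LoraConfig, get_peft_model

torch._logging.set_logs(recompiles=True)  # same artifact as TORCH_LOGS="recompiles"

base = nn.Sequential(nn.Linear(16, 16))
model = get_peft_model(base, LoraConfig(r=4, lora_alpha=4, target_modules=["0"]))
compiled = torch.compile(model)

x = torch.randn(2, 16)
compiled(x)  # first call compiles; a guard pins scaling['default'] == 1.0 (alpha/r)

for m in model.modules():            # emulate a lora_scale change
    if hasattr(m, "scaling"):
        m.scaling["default"] = 0.5   # the EQUALS_MATCH guard now fails ...
compiled(x)                          # ... so this call logs a recompile

With lora_alpha == r the initial scaling is 1.0, which matches the guard above.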
L['self'].transformer_blocks[12].norm2_context
  (module)                       ID_MATCH 140581769894480; .training ID_MATCH 140591004393440  # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states)  # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward

L['self'].transformer_blocks[12]  (continued)
  ._forward_hooks / ._backward_hooks / ._forward_pre_hooks / ._backward_pre_hooks   GuardManager only

L['self'].transformer_blocks[13]
  (module)                       ID_MATCH 140581769892752; DICT_CONTAINS: not 'forward'; .training ID_MATCH 140591004393440  # for index_block, block in enumerate(self.transformer_blocks):  # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
  .ff                            ID_MATCH 140581769896400; DICT_CONTAINS: not 'forward'; .training ID_MATCH 140591004393440  # ff_output = self.ff(norm_hidden_states)  # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
  .ff.net                        ID_MATCH 140581769896640, TYPE_MATCH 93831537618768, LENGTH_CHECK: len == 3; .training ID_MATCH 140591004393440  # for module in self.net:  # diffusers/src/diffusers/models/attention.py:1200 in forward
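For reference, dumps like this section come from Dynamo's artifact logging and can be reproduced without touching library code. A small self-contained sketch, assuming a recent PyTorch 2.x (the set_logs keywords mirror the TORCH_LOGS environment variable):

import torch
import torch.nn as nn

# Equivalent to running under TORCH_LOGS="guards,recompiles".
torch._logging.set_logs(guards=True, recompiles=True)

model = nn.Linear(8, 8)          # stand-in for the Flux transformer
compiled = torch.compile(model)
compiled(torch.randn(2, 8))      # the compile step prints GUARDS / TREE_GUARD_MANAGER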
L['self'].transformer_blocks[13].ff.net[0]  (diffusers GELU)
  (module)                       ID_MATCH 140581769896592; DICT_CONTAINS: not 'forward'; .training ID_MATCH 140591004393440  # for module in self.net:  # diffusers/src/diffusers/models/attention.py:1200 in forward
  .proj  (PEFT lora.Linear)      ID_MATCH 140533116026784; DICT_CONTAINS: not 'forward'; .training ID_MATCH 140591004393408  # hidden_states = self.proj(hidden_states)  # diffusers/src/diffusers/models/activations.py:88 in forward
    .lora_A                      ID_MATCH 140533116021744; .training ID_MATCH 140591004393408  # if active_adapter not in self.lora_A.keys():  # peft/tuners/lora/layer.py:560 in forward
    .lora_A['default_0']         ID_MATCH 140533116366960; .training ID_MATCH 140591004393408  # lora_A = self.lora_A[active_adapter]  # peft/tuners/lora/layer.py:562 in forward
    .lora_A['default_0'].weight  ID_MATCH 140537315695392  # x = x.to(lora_A.weight.dtype)  # peft/tuners/lora/layer.py:566 in forward
    .lora_B                      ID_MATCH 140533116361872; .training ID_MATCH 140591004393408  # lora_B = self.lora_B[active_adapter]  # peft/tuners/lora/layer.py:563 in forward
    .lora_B['default_0']         ID_MATCH 140533116362112; .training ID_MATCH 140591004393408  # lora_B = self.lora_B[active_adapter]  # peft/tuners/lora/layer.py:563 in forward
    .base_layer                  ID_MATCH 140581769896688; .training ID_MATCH 140591004393440  # result = self.base_layer(x, *args, **kwargs)  # peft/tuners/lora/layer.py:557 in forward
    .lora_dropout                ID_MATCH 140533116021360; .training ID_MATCH 140591004393408  # dropout = self.lora_dropout[active_adapter]  # peft/tuners/lora/layer.py:564 in forward
    .lora_dropout['default_0']   ID_MATCH 140533116021216; .training ID_MATCH 140591004393408  # dropout = self.lora_dropout[active_adapter]  # peft/tuners/lora/layer.py:564 in forward
    .scaling                     TYPE_MATCH 140591004466944, DICT_LENGTH == 1; ['default_0'] EQUALS_MATCH == 1.0  # scaling = self.scaling[active_adapter]  # peft/tuners/lora/layer.py:565 in forward
    .use_dora                    TYPE_MATCH 140591004466944, DICT_LENGTH == 1; ['default_0'] ID_MATCH 140591004393440  # if not self.use_dora[active_adapter]:  # peft/tuners/lora/layer.py:568 in forward
    ._forward_hooks / ._backward_hooks   GuardManager only
    .merged_adapters             TYPE_MATCH 140591004458752; LENGTH_CHECK: empty  # return bool(self.merged_adapters)  # peft/tuners/tuners_utils.py:506 in merged
    ._disable_adapters           ID_MATCH 140591004393440  # return self._disable_adapters  # peft/tuners/tuners_utils.py:511 in disable_adapters
    ._forward_pre_hooks / ._backward_pre_hooks   GuardManager only
    ._active_adapter             TENSOR_ALIASING: is L['self'].transformer_blocks[0].norm1.linear._active_adapter (logged twice)  # return self._active_adapter  # peft/tuners/tuners_utils.py:516 in active_adapter
  .approximate                   EQUALS_MATCH == 'tanh'  # return F.gelu(gate, approximate=self.approximate)  # diffusers/src/diffusers/models/activations.py:83 in gelu
  ._forward_hooks / ._backward_hooks / ._forward_pre_hooks / ._backward_pre_hooks   GuardManager only

L['self'].transformer_blocks[13].ff.net[1]
  (module)                       ID_MATCH 140581769896736; .training ID_MATCH 140591004393440  # for module in self.net:  # diffusers/src/diffusers/models/attention.py:1200 in forward
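Every PEFT lora.Linear in this tree contributes the same bundle of guards (lora_A / lora_B / lora_dropout object ids, scaling, use_dora, merged_adapters, _disable_adapters, the _active_adapter aliasing), so guard volume scales with the number of adapted layers. When the adapter is frozen for inference, merging it into the base weights before compiling removes the bundle along with the LoRA dispatch. A hedged sketch; merge_and_unload is peft's entry point, and diffusers pipelines expose the same idea as fuse_lora():

import torch
import torch.nn as nn
from peft import LoraConfig, get_peft_model

base = nn.Sequential(nn.Linear(16, 16))
peft_model = get_peft_model(base, LoraConfig(r=4, lora_alpha=4, target_modules=["0"]))

# Fold lora_B @ lora_A * scaling into the base Linear weight, then compile
# the plain module: the LoRA branch, and its guard bundle, disappears.
merged = peft_model.merge_and_unload()
compiled = torch.compile(merged)
compiled(torch.randn(2, 16))

The trade-off is that the merged weights are fixed; toggling adapters or scales afterwards requires unmerging, which lands back on the guards above.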
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[2].lora_A, 140533116355344) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[2].lora_A['default_0'], 140533116356688) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[2].lora_A['default_0'].weight, 140537315494464) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[2].lora_B, 140533116364368) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[2].lora_B['default_0'], 140533116363648) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] 
[0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[2].base_layer, 140581769896784) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[2].lora_dropout, 140533116356304) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[2].lora_dropout['default_0'], 140533116356160) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) 
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].ff.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].ff.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].ff.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].ff.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].ff.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2]._backward_hooks, 
accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].ff.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[13].ff.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff._forward_pre_hooks, 
accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn, 140581769895632) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_k, 140533116777616) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_k.lora_A, 140533116769456) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_k.lora_A['default_0'], 140533117724816) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_k.lora_A['default_0'].weight, 140537315815440) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_k.lora_B, 140533116766960) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_k.lora_B['default_0'], 140533117723040) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | 
| | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_k.base_layer, 140581769895776) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_k.lora_dropout, 140533116767056) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_k.lora_dropout['default_0'], 140533116768160) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[13].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_q, 140533116410448) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_q.lora_A, 140533116412800) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_q.lora_A['default_0'], 140533116775360) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_q.lora_A['default_0'].weight, 140537315807120) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_q.lora_B, 140533116405696) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_q.lora_B['default_0'], 140533116764272) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) 
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_q.base_layer, 140581769895872) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_q.lora_dropout, 140533116414288) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_q.lora_dropout['default_0'], 140533116405648) # dropout = 
self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_q.use_dora['default_0'], 
140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[13].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_v, 140533117729664) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_v.lora_A, 140533115770752) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_v.lora_A['default_0'], 140533115771856) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_v.lora_A['default_0'].weight, 140537315810320) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_v.lora_B, 140533115773008) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_v.lora_B['default_0'], 140533115780208) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_v.base_layer, 140581769895968) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_v.lora_dropout, 140533117723136) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_v.lora_dropout['default_0'], 140533117717520) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[13].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
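Reading aid: every LoRA-wrapped projection in this guard tree (to_q, to_v, to_out[0], add_k_proj, add_q_proj) receives the same bundle of guards, one per attribute that peft's LoRA forward reads. The sketch below reconstructs that guarded path from the source lines quoted in the guard comments (peft/tuners/lora/layer.py:557-568); the class name LoraLinearSketch, the constructor, and the single-adapter 'default_0' setup are illustrative assumptions, not peft's actual implementation.

```python
# Minimal sketch of the guarded code path, assembled from the source lines
# quoted in the guard comments above (peft/tuners/lora/layer.py:557-568).
# Everything outside those quoted lines is an illustrative assumption.
import torch
import torch.nn as nn

class LoraLinearSketch(nn.Module):  # hypothetical name, not peft's class
    def __init__(self, base_layer: nn.Linear, r: int = 16, adapter: str = "default_0"):
        super().__init__()
        self.base_layer = base_layer
        self.lora_A = nn.ModuleDict({adapter: nn.Linear(base_layer.in_features, r, bias=False)})
        self.lora_B = nn.ModuleDict({adapter: nn.Linear(r, base_layer.out_features, bias=False)})
        self.lora_dropout = nn.ModuleDict({adapter: nn.Identity()})
        self.scaling = {adapter: 1.0}      # guarded above via EQUALS_MATCH == 1.0
        self.use_dora = {adapter: False}   # guarded above via ID_MATCH on False
        self.merged_adapters: list = []    # guarded above via LENGTH_CHECK (empty)
        self.active_adapters = [adapter]

    def forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)        # layer.py:557
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():    # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]            # layer.py:562
            lora_B = self.lora_B[active_adapter]            # layer.py:563
            dropout = self.lora_dropout[active_adapter]     # layer.py:564
            scaling = self.scaling[active_adapter]          # layer.py:565
            x = x.to(lora_A.weight.dtype)                   # layer.py:566
            if not self.use_dora[active_adapter]:           # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result
```

Each attribute read above becomes one guard per wrapped layer, which is why the tree repeats the same dozen checks for every projection: loading a second adapter, changing a scale away from 1.0, or enabling DoRA would fail a guard and force recompilation.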
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.norm_k, 140581769895824) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.norm_k.weight, 140581765997456) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.norm_q, 140581769895728) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.norm_q.weight, 140581765997536) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
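Reading aid: the norm_k/norm_q guards above reference diffusers' RMSNorm forward (normalization.py:428 and 430). A minimal sketch, assuming only the two quoted lines are authoritative; the fp32 variance computation and the affine handling are plausible reconstruction, not a copy of diffusers' code.

```python
# RMSNorm as suggested by the guarded lines normalization.py:428/430.
import torch
import torch.nn as nn

class RMSNormSketch(nn.Module):  # hypothetical name
    def __init__(self, dim: int, eps: float = 1e-6, elementwise_affine: bool = True):
        super().__init__()
        self.eps = eps  # guarded above via EQUALS_MATCH: eps == 1e-06
        self.weight = nn.Parameter(torch.ones(dim)) if elementwise_affine else None

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        input_dtype = hidden_states.dtype
        variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + self.eps)  # normalization.py:428
        if self.weight is not None:                                       # normalization.py:430
            hidden_states = hidden_states * self.weight
        return hidden_states.to(input_dtype)
```

Note how the guards mirror this forward exactly: eps is pinned by value, while weight (a tensor) is pinned by object identity.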
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out, 140581769896160) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[0], 140533117412656) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].attn.to_out[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[0].training, 140591004393408) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[0].lora_A, 140533117409728) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[0].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[0].lora_A['default_0'], 140533117408192) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[0].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[0].lora_A['default_0'].weight, 140537315700512) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[0].lora_B, 140533117408144) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[0].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[0].lora_B['default_0'], 140533117404784) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[0].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[0].base_layer, 140581769896208) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[0].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[0].lora_dropout, 140533117411888) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[0].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[0].lora_dropout['default_0'], 140533117412848) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[0].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.to_out[0].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].attn.to_out[0].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].attn.to_out[0].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.to_out[0].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].attn.to_out[0].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[0].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.to_out[0].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[13].attn.to_out[0].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[0]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
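Reading aid: rough Python restatements of the guard kinds appearing in this subtree. Dynamo evaluates them natively and the helper names (___check_obj_id, ___check_type_id) are its own, so the snippet below only illustrates the semantics. The recurring ids 140591004393408/140591004393440 guard .training flags and use_dora['default_0'], so they are presumably the ids of the True/False singletons in this process (an inference from the log, not something the log states). Note also that despite its name, TENSOR_ALIASING here asserts that _active_adapter reached through transformer_blocks[0].norm1.linear and through this module is literally the same object.

```python
# Illustrative semantics of the guard kinds above (not Dynamo's implementation).
scaling = {"default_0": 1.0}
use_dora = {"default_0": False}
merged_adapters: list = []
_active_adapter = ["default_0"]
alias = _active_adapter  # the same object reached through a second module path

assert type(scaling) is dict           # TYPE_MATCH: exact type id, subclass fails
assert len(scaling) == 1               # DICT_LENGTH
assert scaling["default_0"] == 1.0     # EQUALS_MATCH: value equality
assert use_dora["default_0"] is False  # ID_MATCH: identity with a recorded object id
assert not merged_adapters             # LENGTH_CHECK: container is empty
assert alias is _active_adapter        # TENSOR_ALIASING: two sources, one object
```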
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[1], 140581769896256) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_k_proj, 140533117742928) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].attn.add_k_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_k_proj.training, 140591004393408) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_k_proj.lora_A, 140533117128944) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_k_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_k_proj.lora_A['default_0'], 140533117133648) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_k_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_k_proj.lora_A['default_0'].weight, 140537315701472) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_k_proj.lora_B, 140533117140272) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_k_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_k_proj.lora_B['default_0'], 140533117137440) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_k_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_k_proj.base_layer, 140581769896016) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_k_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_k_proj.lora_dropout, 140533117135904) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_k_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_k_proj.lora_dropout['default_0'], 140533117128848) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_k_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.add_k_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].attn.add_k_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].attn.add_k_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.add_k_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].attn.add_k_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_k_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.add_k_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[13].attn.add_k_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_k_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
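Reading aid: a dump of this shape can be regenerated by enabling guard logging before compiling (assuming PyTorch 2.x; setting TORCH_LOGS="guards" in the environment is equivalent to the in-code toggle below). The tiny function here is a stand-in; any compiled frame prints its own TREE_GUARD_MANAGER.

```python
# Enable guard logging, then compile and run any function once.
import torch

torch._logging.set_logs(guards=True)  # or: TORCH_LOGS="guards" python script.py

@torch.compile
def f(x: torch.Tensor) -> torch.Tensor:
    return x * 2

f(torch.randn(4))  # the guard tree for this frame is printed at compile time
```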
source=L['self'].transformer_blocks[13].attn.add_k_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.add_k_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[13].attn.add_k_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_k_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_q_proj, 140533117126976) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # 
diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].attn.add_q_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_q_proj.training, 140591004393408) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_q_proj.lora_A, 140533115926992) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_q_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_q_proj.lora_A['default_0'], 140533117403488) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | 
+- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_q_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_q_proj.lora_A['default_0'].weight, 140537315696112) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_q_proj.lora_B, 140533115921472) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_q_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_q_proj.lora_B['default_0'], 140533117404736) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_q_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_q_proj.base_layer, 140581769896112) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_q_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_q_proj.lora_dropout, 140533115927616) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_q_proj.lora_dropout.training, 140591004393408) # dropout = 
self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_q_proj.lora_dropout['default_0'], 140533115927040) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_q_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.add_q_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].attn.add_q_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].attn.add_q_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.add_q_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | 
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].attn.add_q_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_q_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.add_q_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[13].attn.add_q_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_q_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | 
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_v_proj, 140533115772000) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].attn.add_v_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_v_proj.training, 140591004393408) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_v_proj.lora_A, 140533117871984) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[13].attn.add_v_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_v_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_v_proj.lora_A['default_0'], 140533117876640) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_v_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_v_proj.lora_A['default_0'].weight, 140537315694992) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_v_proj.lora_B, 140533117870352) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_v_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_v_proj.lora_B['default_0'], 140533117872896) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_v_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_v_proj.base_layer, 140581769896064) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_v_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.lora_dropout, 
accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_v_proj.lora_dropout, 140533117871888) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_v_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_v_proj.lora_dropout['default_0'], 140533117862528) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_v_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.add_v_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].attn.add_v_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].attn.add_v_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.add_v_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].attn.add_v_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_v_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.add_v_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[13].attn.add_v_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_v_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_add_out, 140533117403584) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].attn.to_add_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_add_out.training, 140591004393408) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_add_out.lora_A, 140533117416304) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_add_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_add_out.lora_A['default_0'], 140533116022368) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_add_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_add_out.lora_A['default_0'].weight, 140537315698272) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_add_out.lora_B, 140533117418320) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_add_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_add_out.lora_B['default_0'], 140533116015888) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_add_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_add_out.base_layer, 
140581769896304) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_add_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_add_out.lora_dropout, 140533117418752) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_add_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_add_out.lora_dropout['default_0'], 140533117412176) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | 
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_add_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.to_add_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].attn.to_add_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].attn.to_add_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.to_add_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].attn.to_add_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_add_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.to_add_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[13].attn.to_add_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_add_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.norm_added_k, 140581769896448) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: 
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.norm_added_k, 140581769896448) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.norm_added_k.weight, 140581765997296) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.norm_added_q, 140581769896352) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.norm_added_q.weight, 140581765997376) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.processor, 140581769895584) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
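
Note: the norm_added_q/norm_added_k guards pin an RMS-style norm: EQUALS_MATCH fixes eps == 1e-06 and ID_MATCH pins the (optional) weight Parameter, referencing diffusers normalization.py:428-430. The EQUALS_MATCH on attn.heads == 24 similarly bakes the head count into the graph, since the traced code computes head_dim = inner_dim // attn.heads. An approximate sketch of that norm forward, inferred from the quoted source lines (not diffusers' exact code):

    import torch

    def rms_norm_sketch(hidden_states, weight=None, eps=1e-6):
        # Normalize by the root-mean-square of the last dimension.
        variance = hidden_states.float().pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + eps)   # normalization.py:428
        if weight is not None:                                        # normalization.py:430
            hidden_states = hidden_states.to(weight.dtype) * weight
        return hidden_states
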
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1, accessed_by=DictGetItemGuardAccessor(norm1)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1, 140581769895152) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.norm, 140581769895296) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.silu, 140581769895200) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.linear, 140533116405936) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].norm1.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
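
Note: the norm1 guards describe an AdaLayerNormZero block (the source comments quote normalization.py:135-139 and transformer_flux.py:165). A sketch of that modulation path, reconstructed from the quoted lines; layer sizes and the six-way chunk follow the cited diffusers code but are illustrative here:

    import torch
    import torch.nn as nn

    class AdaLayerNormZeroSketch(nn.Module):
        def __init__(self, embedding_dim: int):
            super().__init__()
            self.silu = nn.SiLU()
            self.linear = nn.Linear(embedding_dim, 6 * embedding_dim)
            self.norm = nn.LayerNorm(embedding_dim, elementwise_affine=False, eps=1e-6)

        def forward(self, x, emb):
            emb = self.linear(self.silu(emb))                                  # normalization.py:137
            shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = emb.chunk(6, dim=1)
            x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]   # normalization.py:139
            return x, gate_msa, shift_mlp, scale_mlp, gate_mlp

Because norm1.linear is itself LoRA-wrapped, the guards below descend into its lora_A/lora_B/base_layer submodules rather than stopping at the Linear.
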
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.linear.lora_A, 140533116418560) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.linear.lora_A['default_0'], 140533116414432) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.linear.lora_A['default_0'].weight, 140537315813040) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.linear.lora_B, 140533116415824) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.linear.lora_B['default_0'], 140533116415152) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.linear.base_layer, 140581769895248) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
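
Note: a rough reading of the recurring guard kinds in this dump (an interpretation based on the guard names and checked expressions; the real implementations live in torch._dynamo.guards and are partly C++-accelerated):

    def check_obj_id(obj, expected_id):        # ID_MATCH: must be the same Python object
        return id(obj) == expected_id

    def check_type_id(obj, expected_type_id):  # TYPE_MATCH: must be the exact same type
        return id(type(obj)) == expected_type_id

    def equals_match(value, expected):         # EQUALS_MATCH: value equality, e.g. scaling == 1.0
        return value == expected

    def length_check(container):               # LENGTH_CHECK: e.g. 'not merged_adapters'
        return not container

ID_MATCH works on .training flags because True and False are singletons; the two ids that repeat throughout (140591004393408 and 140591004393440) are those bool singletons, which is why the same constants appear on every module.
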
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.linear.lora_dropout, 140533116416064) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.linear.lora_dropout['default_0'], 140533116414912) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].norm1.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].norm1.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].norm1.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].norm1.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].norm1.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].norm1.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[13].norm1.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
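
Note: the EQUALS_MATCH on scaling['default_0'] == 1.0 bakes the LoRA scale into the compiled graph as a constant. In PEFT the per-adapter scale is lora_alpha / r, so 1.0 indicates this adapter was loaded with lora_alpha == r (illustrative numbers below):

    r, lora_alpha = 16, 16
    scaling = {"default_0": lora_alpha / r}   # == 1.0, what EQUALS_MATCH verifies
    # Mutating this float at runtime (e.g. re-weighting the adapter) would be
    # expected to fail the guard and force torch.compile to re-specialize:
    scaling["default_0"] = 0.5                # EQUALS_MATCH now fails -> recompile
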
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm2, accessed_by=DictGetItemGuardAccessor(norm2)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm2, 140581769896496) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm2.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context, 140581769896832) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net, 140581769896976) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[13].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
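
Note: the ff_context guards pin down a diffusers FeedForward: a three-element ModuleList iterated at attention.py:1200 (hence LENGTH_CHECK len(net) == 3). From the child guards that follow, net[0] is a tanh-GELU projection whose proj Linear is LoRA-wrapped, net[1] has only a training flag (consistent with a dropout), and net[2] is the LoRA-wrapped output Linear. A minimal sketch under those assumptions:

    import torch.nn as nn
    import torch.nn.functional as F

    class GELUProjSketch(nn.Module):
        def __init__(self, dim_in, dim_out, approximate="tanh"):
            super().__init__()
            self.proj = nn.Linear(dim_in, dim_out)
            self.approximate = approximate

        def forward(self, hidden_states):
            hidden_states = self.proj(hidden_states)                    # activations.py:88
            return F.gelu(hidden_states, approximate=self.approximate)  # activations.py:83

    class FeedForwardSketch(nn.Module):
        def __init__(self, dim, mult=4):
            super().__init__()
            self.net = nn.ModuleList([
                GELUProjSketch(dim, dim * mult),
                nn.Dropout(0.0),
                nn.Linear(dim * mult, dim),
            ])

        def forward(self, hidden_states):
            for module in self.net:                                     # attention.py:1200
                hidden_states = module(hidden_states)
            return hidden_states
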
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0], 140581769896928) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].proj, 140533116365808) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].ff_context.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_A, 140533116355872) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_A['default_0'], 140533116368736) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_A['default_0'].weight, 140537315505904) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_B, 140533116355680) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_B['default_0'], 140533116369696) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].proj.base_layer, 140581769897024) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_dropout, 140533116357552) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_dropout['default_0'], 140533116354912) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].ff_context.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].ff_context.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].ff_context.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
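
Note: the ID_MATCH on lora_A['default_0'].weight exists because the traced forward reads the parameter's dtype (x = x.to(lora_A.weight.dtype), layer.py:566), so the guard pins the exact Parameter object. A small illustration of what survives that guard and what would not (hypothetical tensors, for illustration only):

    import torch
    import torch.nn as nn

    lora_A = nn.Linear(8, 4, bias=False)
    x = torch.randn(2, 8, dtype=torch.float16)
    x = x.to(lora_A.weight.dtype)                    # the dtype read guarded above (layer.py:566)
    lora_A.weight.data.copy_(torch.randn(4, 8))      # in-place update: same object, ID_MATCH still holds
    # lora_A.weight = nn.Parameter(torch.randn(4, 8))  # rebinding creates a new id() -> guard fails, recompile
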
| | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].ff_context.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].ff_context.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].ff_context.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[13].ff_context.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[13].ff_context.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[1], 140581769897120) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 
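The TENSOR_ALIASING entry above pins `_active_adapter` to the very same list object held by `transformer_blocks[0].norm1.linear`; PEFT shares one active-adapter list across every injected layer, and the guard is an identity check, not an equality check. A minimal sketch of what such a guard reduces to (`LoraProxy` and `aliasing_guard` are illustrative names, not TorchDynamo internals):

```python
# Illustration only: an aliasing guard passes on object identity, not equality.
shared = ["default_0"]          # stands in for the shared _active_adapter list

class LoraProxy:
    def __init__(self, active_adapter):
        self._active_adapter = active_adapter

a = LoraProxy(shared)           # e.g. transformer_blocks[0].norm1.linear
b = LoraProxy(shared)           # e.g. transformer_blocks[13].ff_context.net[0].proj

def aliasing_guard(lhs, rhs):
    # Holds only while both attributes reference the same object.
    return lhs._active_adapter is rhs._active_adapter

assert aliasing_guard(a, b)
b._active_adapter = list(shared)    # equal contents, different object
assert not aliasing_guard(a, b)     # the compiled entry would be rejected
```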
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[2], 140533116358656) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].ff_context.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[2].lora_A, 140533116355296) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[2].lora_A['default_0'], 140533115746192) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[2].lora_A['default_0'].weight, 140537315507264) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[2].lora_B, 140533116364656) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[2].lora_B['default_0'], 140533115746384) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[2].base_layer, 140581769897168) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
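Every `peft/tuners/lora/layer.py:557-568` comment in the entries above points into the unmerged LoRA forward, which is what the compiled region keeps re-checking on each call. A paraphrased outline of that code path (attribute names match the log, but this is a sketch, not PEFT's verbatim source):

```python
import torch

# Paraphrase of the unmerged LoRA forward that the guards above protect
# (peft/tuners/lora/layer.py:557-568); an outline, not the library's exact code.
def lora_linear_forward(self, x, *args, **kwargs):
    result = self.base_layer(x, *args, **kwargs)        # layer.py:557
    for active_adapter in self.active_adapters:
        if active_adapter not in self.lora_A.keys():    # layer.py:560
            continue
        lora_A = self.lora_A[active_adapter]            # layer.py:562
        lora_B = self.lora_B[active_adapter]            # layer.py:563
        dropout = self.lora_dropout[active_adapter]     # layer.py:564
        scaling = self.scaling[active_adapter]          # layer.py:565
        x = x.to(lora_A.weight.dtype)                   # layer.py:566
        if not self.use_dora[active_adapter]:           # layer.py:568
            result = result + lora_B(lora_A(dropout(x))) * scaling
    return result
```

Each dictionary access in this path is why the tree carries a TYPE_MATCH plus DICT_LENGTH guard for every `scaling` and `use_dora` dict, and an ID_MATCH for every `lora_A`, `lora_B`, and `lora_dropout` entry.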
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[2].lora_dropout, 140533116369168) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[2].lora_dropout['default_0'], 140533116365328) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].ff_context.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].ff_context.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].ff_context.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].ff_context.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].ff_context.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].ff_context.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[13].ff_context.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context, 140581769895344) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.norm, 140581769895536) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.silu, 140581769895440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.linear, 140533116415872) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].norm1_context.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.linear.lora_A, 140533116415248) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
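Two object ids recur under every `.training` guard in this dump, 140591004393440 and 140591004393408; since CPython interns `True` and `False` as singletons, these are plausibly `id(False)` and `id(True)` for this process, so an ID_MATCH on a bool attribute is effectively a value check. It also means flipping any submodule between `train()` and `eval()` after compilation fails the guard set and forces a recompile. A small illustration of the principle (`check_obj_id` mimics the logged `___check_obj_id` helper; it is not the real function):

```python
import torch

def check_obj_id(obj, expected_id):
    # What an ID_MATCH guard amounts to: compare against an id() saved at trace time.
    return id(obj) == expected_id

lin = torch.nn.Linear(4, 4).eval()
frozen = id(lin.training)            # id(False), captured when the guard was built

assert check_obj_id(lin.training, frozen)
lin.train()                          # .training now points at the True singleton
assert not check_obj_id(lin.training, frozen)   # guard fails, Dynamo recompiles
```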
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.linear.lora_A['default_0'], 140533116409968) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.linear.lora_A['default_0'].weight, 140537315809120) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.linear.lora_B, 140533116416208) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.linear.lora_B['default_0'], 140533116414096) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.linear.base_layer, 140581769895488) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.linear.lora_dropout, 140533116415008) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.linear.lora_dropout['default_0'], 140533116414480) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].norm1_context.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].norm1_context.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].norm1_context.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].norm1_context.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].norm1_context.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].norm1_context.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[13].norm1_context.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
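Almost everything guarded here exists only because the LoRA branch stays live at inference time: `merged_adapters` must stay empty, `_disable_adapters` must stay False, and `scaling['default_0']` must stay 1.0 on every call. If the adapters are static, folding them into the base weights before `torch.compile` removes the PEFT wrappers from the traced graph, and most of this guard tree with them. A hedged sketch of that workflow; the exact entry point (`fuse_lora`) depends on the diffusers version in use, and the LoRA repo id below is a placeholder:

```python
import torch
from diffusers import FluxPipeline

pipe = FluxPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16
).to("cuda")
pipe.load_lora_weights("user/some-flux-lora")   # placeholder repo id

# Fold W += (lora_B @ lora_A) * scaling into the base Linear weights so the
# compiled graph sees plain nn.Linear modules instead of PEFT LoraLayer wrappers.
pipe.fuse_lora()

pipe.transformer = torch.compile(pipe.transformer, mode="max-autotune")
```

The dump itself can be reproduced by running the pipeline under `TORCH_LOGS="guards"` (or `torch._logging.set_logs(guards=True)`), which is presumably how this log was captured.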
source=L['self'].transformer_blocks[13].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm2_context, 140581769896544) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- GuardManager: source=L['self'].transformer_blocks[14], accessed_by=GetItemGuardAccessor(14) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14], 140581769894816) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff, accessed_by=DictGetItemGuardAccessor(ff) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff, 140581769898464) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net, 140581769898704) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[14].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0], 140581769898656) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].proj, 140533115966352) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].ff.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].proj.lora_A, 140533115965200) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].proj.lora_A['default_0'], 140533116612528) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].proj.lora_A['default_0'].weight, 140537315216736) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].proj.lora_B, 140533115969568) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].proj.lora_B['default_0'], 140533116607488) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].proj.base_layer, 140581769898752) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].proj.lora_dropout, 140533115971104) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
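Every lora_A / lora_B / base_layer / lora_dropout guard in this stretch points at the same few lines of peft's lora.Linear.forward. Below is a simplified paraphrase of that branch, reconstructed from the line references in the guard comments (peft/tuners/lora/layer.py:557-568), not the exact upstream source:

    def lora_linear_forward(self, x):
        result = self.base_layer(x)                        # layer.py:557
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():   # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]           # layer.py:562
            lora_B = self.lora_B[active_adapter]           # layer.py:563
            dropout = self.lora_dropout[active_adapter]    # layer.py:564
            scaling = self.scaling[active_adapter]         # layer.py:565
            x = x.to(lora_A.weight.dtype)                  # layer.py:566
            if not self.use_dora[active_adapter]:          # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Because Dynamo traced this Python, every attribute the branch touches becomes a guard, and the per-adapter dict lookups are what produce the GetItemGuardAccessor(default_0) entries.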
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].proj.lora_dropout['default_0'], 140533115973840) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].ff.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].ff.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].ff.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].ff.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].ff.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
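The EQUALS_MATCH on scaling['default_0'] == 1.0 (and the ID_MATCH on use_dora['default_0'] above it) means the LoRA scale and the DoRA flag are baked into this compiled graph as constants. A hypothetical consequence, assuming a diffusers Flux pipeline object named pipe with this transformer compiled (pipe and prompt are illustrative, not from the log):

    import torch
    # pipe = FluxPipeline.from_pretrained(...); pipe.load_lora_weights(...)  # assumed setup
    pipe.transformer = torch.compile(pipe.transformer)
    prompt = "a photo"
    pipe(prompt, joint_attention_kwargs={"lora_scale": 1.0})  # compiles; guard: scaling == 1.0
    pipe(prompt, joint_attention_kwargs={"lora_scale": 1.0})  # guards pass; cached graph reused
    pipe(prompt, joint_attention_kwargs={"lora_scale": 0.7})  # scaling mutated -> EQUALS_MATCH fails -> recompile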
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].ff.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[14].ff.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
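TENSOR_ALIASING, emitted twice per layer here, records that a value is the same object as one already guarded elsewhere: peft keeps one shared _active_adapter list, and every wrapped layer returns it from the active_adapter property (tuners_utils.py:516), so Dynamo only asserts the aliasing relation instead of re-checking the list's contents per layer. A minimal illustration of the idea in plain Python (not torch internals):

    def aliasing_guard(a, b):
        return a is b                      # identity, not equality

    shared = ["default_0"]                 # one list shared across adapter layers
    norm1_active, proj_active = shared, shared
    assert aliasing_guard(norm1_active, proj_active)          # holds: same object
    assert aliasing_guard(shared, list(shared)) is False      # an equal copy would fail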
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[1], 140581769898800) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
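The approximate == 'tanh' guard pins the feed-forward activation to the tanh-approximated GELU, the call guarded at activations.py:83. A short, runnable contrast with stock PyTorch:

    import torch
    import torch.nn.functional as F

    gate = torch.randn(2, 16)
    y_tanh = F.gelu(gate, approximate="tanh")   # the path taken by the compiled graph
    y_exact = F.gelu(gate)                      # default erf-based GELU, for contrast
    print((y_tanh - y_exact).abs().max())       # small but nonzero difference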
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[2], 140533116608976) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].ff.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[2].lora_A, 140533116473824) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[2].lora_A['default_0'], 140533116485440) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[2].lora_A['default_0'].weight, 140537315229136) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[2].lora_B, 140533116482896) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[2].lora_B['default_0'], 140533116485584) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[2].base_layer, 140581769898848) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[2].lora_dropout, 140533116611712) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[2].lora_dropout['default_0'], 140533116607152) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].ff.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].ff.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].ff.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].ff.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].ff.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].ff.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[14].ff.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn, 140581769897696) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
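Each LoRA-wrapped projection contributes a couple dozen guards like the to_k block that follows, and the pattern repeats for every projection in every transformer block, so guard evaluation itself carries a per-call cost and almost any adapter-side mutation forces a recompile. When the adapter is frozen at inference time, one possible mitigation is to fold the LoRA into the base weights before compiling; a sketch assuming a diffusers pipeline named pipe with a LoRA already loaded (illustrative setup, not taken from this log):

    import torch
    # fuse_lora / unload_lora_weights are diffusers pipeline APIs; after this the
    # traced modules are plain nn.Linear, so the lora_A / lora_B / scaling /
    # use_dora / merged_adapters guards above should disappear from the tree.
    pipe.fuse_lora()              # fold (lora_B @ lora_A) * scaling into base weights
    pipe.unload_lora_weights()    # drop the peft wrapper modules
    pipe.transformer = torch.compile(pipe.transformer)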
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_k, 140533116283264) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_k.lora_A, 140533116280960) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_k.lora_A['default_0'], 140533116276112) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_k.lora_A['default_0'].weight, 140537315361152) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_k.lora_B, 140533116280336) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_k.lora_B['default_0'], 140533116278224) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
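Note that the guards on lora_A['default_0'].weight are ID_MATCH, i.e. they pin the Parameter object's identity, not its values: in-place updates keep a compiled graph valid, while rebinding the attribute does not. A self-contained illustration:

    import torch
    import torch.nn as nn

    lin = nn.Linear(8, 8)
    weight_id = id(lin.weight)

    with torch.no_grad():
        lin.weight.copy_(torch.randn(8, 8))       # in-place: same object, ID_MATCH still passes
    assert id(lin.weight) == weight_id

    lin.weight = nn.Parameter(torch.randn(8, 8))  # rebind: new object, ID_MATCH fails
    assert id(lin.weight) != weight_id            # -> the next call would recompile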
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_k.base_layer, 140581769897840) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_k.lora_dropout, 140533116281152) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_k.lora_dropout['default_0'], 140533116286528) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[14].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_q, 140533116283744) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_q.lora_A, 140533116284128) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_q.lora_A['default_0'], 140533116286288) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_q.lora_A['default_0'].weight, 140537315495584) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_q.lora_B, 140533116284080) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_q.lora_B.training,
140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_q.lora_B['default_0'], 140533116279376) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_q.base_layer, 140581769897936) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_q.lora_dropout, 140533116287536) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[14].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_q.lora_dropout['default_0'], 140533116273616) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | 
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[14].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[14].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_v, 140533116191408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_v.lora_A, 140533116198416) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_v.lora_A['default_0'], 140533116198368) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_v.lora_A['default_0'].weight, 140537315374592) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_v.lora_B, 140533116198704) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_v.lora_B['default_0'], 140533116200768) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_v.base_layer, 140581769898032) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_v.lora_dropout, 140533116198032) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_v.lora_dropout['default_0'], 140533116197696) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # 
peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[14].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.norm_k, 140581769897888) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[14].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.norm_k.weight, 140581765999456) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.norm_q, 140581769897792) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_q.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.norm_q.weight, 140581772710976) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out, 140581769898224) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] 
[0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[0], 140533115013728) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].attn.to_out[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[0].training, 140591004393408) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[0].lora_A, 140533115017136) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[0].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[0].lora_A['default_0'], 140533115024480) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[0].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[0].lora_A['default_0'].weight, 140537315361312) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[0].lora_B, 140533115012192) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[14].attn.to_out[0].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[0].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[0].lora_B['default_0'], 140533115015408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[0].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[0].base_layer, 140581769898272) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[0].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[0].lora_dropout, 140533115013248) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[0].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[0].lora_dropout['default_0'], 140533115013056) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[0].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.to_out[0].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].attn.to_out[0].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[14].attn.to_out[0].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].attn.to_out[0].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.to_out[0].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].attn.to_out[0].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[0].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.to_out[0].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[14].attn.to_out[0].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[0]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[1], accessed_by=GetItemGuardAccessor(1) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[1], 140581769898320) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_k_proj, 140533116201152) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # 
diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].attn.add_k_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_k_proj.training, 140591004393408) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_k_proj.lora_A, 140533116197408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_k_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_k_proj.lora_A['default_0'], 140533116197024) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | 
+- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_k_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_k_proj.lora_A['default_0'].weight, 140537315373552) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_k_proj.lora_B, 140533116197648) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_k_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_k_proj.lora_B['default_0'], 140533116199424) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_k_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_k_proj.base_layer, 140581769898080) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_k_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_k_proj.lora_dropout, 140533116201296) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_k_proj.lora_dropout.training, 140591004393408) # dropout = 
self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_k_proj.lora_dropout['default_0'], 140533116200480) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_k_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.add_k_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].attn.add_k_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].attn.add_k_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.add_k_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | 
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].attn.add_k_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_k_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.add_k_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[14].attn.add_k_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_k_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | 
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_q_proj, 140533115015120) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].attn.add_q_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_q_proj.training, 140591004393408) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_q_proj.lora_A, 140533115017328) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[14].attn.add_q_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_q_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_q_proj.lora_A['default_0'], 140533115024000) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_q_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_q_proj.lora_A['default_0'].weight, 140537315372032) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_q_proj.lora_B, 140533115022656) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_q_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_q_proj.lora_B['default_0'], 140533115024624) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_q_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_q_proj.base_layer, 140581769898176) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_q_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.lora_dropout, 
accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_q_proj.lora_dropout, 140533115023856) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_q_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_q_proj.lora_dropout['default_0'], 140533115012816) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_q_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.add_q_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].attn.add_q_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].attn.add_q_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.add_q_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].attn.add_q_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_q_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.add_q_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[14].attn.add_q_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_q_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_v_proj, 140533116194192) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].attn.add_v_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_v_proj.training, 140591004393408) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_v_proj.lora_A, 140533116196544) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_v_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_v_proj.lora_A['default_0'], 140533116196160) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_v_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) 
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_v_proj.lora_A['default_0'].weight, 140537315362672) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_v_proj.lora_B, 140533116199520) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_v_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_v_proj.lora_B['default_0'], 140533116194960) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_v_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_v_proj.base_layer, 
140581769898128) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_v_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_v_proj.lora_dropout, 140533116194432) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_v_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_v_proj.lora_dropout['default_0'], 140533116193328) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | 
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_v_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.add_v_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].attn.add_v_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].attn.add_v_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.add_v_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].attn.add_v_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_v_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.add_v_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[14].attn.add_v_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_v_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_add_out, 140533115012912) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].attn.to_add_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_add_out.training, 140591004393408) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_add_out.lora_A, 140533115024240) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_add_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_add_out.lora_A['default_0'], 140533115965008) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_add_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_add_out.lora_A['default_0'].weight, 140537315366832) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_add_out.lora_B, 140533115973936) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_add_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_add_out.lora_B['default_0'], 140533115972112) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[14].attn.to_add_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_add_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_add_out.base_layer, 140581769898368) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_add_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_add_out.lora_dropout, 140533115015888) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_add_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_add_out.lora_dropout['default_0'], 140533115021072) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_add_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.to_add_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].attn.to_add_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].attn.to_add_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.to_add_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].attn.to_add_out.use_dora) == 1 # if not 
self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_add_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.to_add_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[14].attn.to_add_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_add_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].transformer_blocks[14].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.norm_added_k, 140581769898512) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.norm_added_k.weight, 140581765999296) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 
in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.norm_added_q, 140581769898416) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_q._parameters, 
accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.norm_added_q.weight, 140581765999376) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.processor, 140581769897648) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1, accessed_by=DictGetItemGuardAccessor(norm1) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1, 140581769897216) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.norm, 140581769897360) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.silu, 140581769897264) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.linear, 140533115745712) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].norm1.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[14].norm1.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.linear.lora_A, 140533115740000) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.linear.lora_A['default_0'], 140533115732944) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | 
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.linear.lora_A['default_0'].weight, 140537315495664) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.linear.lora_B, 140533115734912) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.linear.lora_B['default_0'], 140533115742112) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.linear.base_layer, 140581769897312) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.linear.lora_dropout, 140533115736352) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.linear.lora_dropout['default_0'], 140533115742496) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.lora_dropout['default_0'].training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].norm1.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].norm1.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].norm1.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].norm1.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].norm1.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[14].norm1.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].norm1.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[14].norm1.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | 
| +- GuardManager: source=L['self'].transformer_blocks[14].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm2, accessed_by=DictGetItemGuardAccessor(norm2) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm2, 140581769898560) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm2.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context, 140581769898896) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: 
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net, 140581769899040) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[14].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0], 140581769898992) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].proj, 140533116482704) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].ff_context.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_A, 140533116485104) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_A['default_0'], 140533116482800) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_A['default_0'].weight, 140537315214256) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_B, 140533116484816) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_B['default_0'], 140533116484192) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].proj.base_layer, 140581769899088) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_dropout, 140533116482656) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_dropout['default_0'], 140533116483040) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].ff_context.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].ff_context.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].ff_context.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].ff_context.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].ff_context.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].ff_context.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[14].ff_context.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
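The cluster of guards over net[0].proj mirrors, almost line for line, the PEFT LoRA forward that the trace specializes. Below is a hedged reconstruction pieced together from the source lines the guards themselves quote (peft/tuners/lora/layer.py:557-568); it is a sketch of that path, not a verbatim copy of PEFT:

    def lora_linear_forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)      # layer.py:557
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():  # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]          # layer.py:562
            lora_B = self.lora_B[active_adapter]          # layer.py:563
            dropout = self.lora_dropout[active_adapter]   # layer.py:564
            scaling = self.scaling[active_adapter]        # layer.py:565
            x = x.to(lora_A.weight.dtype)                 # layer.py:566
            if not self.use_dora[active_adapter]:         # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Every attribute read on that path becomes a guard, including the EQUALS_MATCH on the plain float scaling['default_0'] == 1.0, so changing the adapter scale, or merging or disabling adapters, should be expected to invalidate these guards and trigger a recompile.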
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[1], 140581769899184) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[2], 140533116480112) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].ff_context.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
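Taken together, the net guards pin down the shape of this ff_context block: LENGTH_CHECK fixes len(net) == 3, net[0] carries a LoRA-wrapped proj plus approximate == 'tanh', and net[2] is a LoRA-wrapped Linear. net[1] only receives a training-flag guard; in diffusers' FeedForward it is a Dropout, which the sketch below assumes, and the 3072/12288 widths are likewise assumed FLUX-scale values, not stated in the log:

    import torch.nn as nn
    import torch.nn.functional as F

    class GELUProj(nn.Module):
        # hypothetical stand-in for diffusers' GELU activation module
        def __init__(self, dim_in, dim_out, approximate="tanh"):
            super().__init__()
            self.proj = nn.Linear(dim_in, dim_out)
            self.approximate = approximate

        def forward(self, hidden_states):
            hidden_states = self.proj(hidden_states)                    # activations.py:88
            return F.gelu(hidden_states, approximate=self.approximate)  # activations.py:83

    # net[0] / net[1] / net[2] as guarded above
    net = nn.ModuleList([GELUProj(3072, 12288), nn.Dropout(0.0), nn.Linear(12288, 3072)])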
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[2].lora_A, 140533116469984) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[2].lora_A['default_0'], 140533116474352) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[2].lora_A['default_0'].weight, 140537315222256) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[2].lora_B, 140533116470944) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[2].lora_B['default_0'], 140533116474400) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[2].base_layer, 140581769899232) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[2].lora_dropout, 140533116480640) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[2].lora_dropout['default_0'], 140533116480352) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].ff_context.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].ff_context.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].ff_context.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].ff_context.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].ff_context.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].ff_context.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[14].ff_context.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context, 140581769897408) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.norm, 140581769897600) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.silu, 140581769897504) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.linear, 140533115746096) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].norm1_context.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
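The norm1_context guards trace diffusers' adaptive layer norm: a SiLU plus a (here LoRA-wrapped) Linear turns the conditioning embedding into shift/scale/gate chunks that modulate a parameter-free LayerNorm. A minimal sketch following the quoted lines (normalization.py:135-139) and the five values unpacked at transformer_flux.py:167; the chunk count of six and the LayerNorm settings are assumptions inferred from that shape, not stated in the log:

    import torch.nn as nn

    class AdaLayerNormZeroSketch(nn.Module):
        def __init__(self, dim):
            super().__init__()
            self.silu = nn.SiLU()
            self.linear = nn.Linear(dim, 6 * dim)  # the LoRA wrapper sits around this in the log
            self.norm = nn.LayerNorm(dim, elementwise_affine=False, eps=1e-6)

        def forward(self, x, emb):
            emb = self.linear(self.silu(emb))                                 # normalization.py:137
            shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = emb.chunk(6, dim=1)
            x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # normalization.py:139
            return x, gate_msa, shift_mlp, scale_mlp, gate_mlp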
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.linear.lora_A, 140533116270240) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.linear.lora_A['default_0'], 140533116257904) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.linear.lora_A['default_0'].weight, 140537315499584) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.linear.lora_B, 140533116270288) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.linear.lora_B['default_0'], 140533116265632) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.linear.base_layer, 140581769897552) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.linear.lora_dropout, 140533116270144) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.linear.lora_dropout['default_0'], 140533116270336) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].norm1_context.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].norm1_context.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].norm1_context.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].norm1_context.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].norm1_context.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
[0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].norm1_context.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[14].norm1_context.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
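Note on the TYPE_MATCH / DICT_LENGTH / EQUALS_MATCH cluster above: the LoRA `scaling` and `use_dora` dicts are checked structurally, and the float `scaling['default_0']` is specialized to the constant 1.0, so this compiled graph stays valid only while the adapter scale is exactly 1.0. A minimal, hypothetical repro of that specialization (the `cfg` dict stands in for PEFT's per-adapter `scaling` dict; it is not part of the traced model):

import torch

cfg = {"scale": 1.0}  # stand-in for self.scaling['default_0']

@torch.compile
def scaled(x):
    # Dynamo burns the float into the graph and installs an
    # EQUALS_MATCH-style guard on cfg["scale"], like the guard above.
    return x * cfg["scale"]

scaled(torch.ones(4))   # first call: compiles with the scale fixed at 1.0
cfg["scale"] = 0.5      # roughly what lowering the LoRA scale does
scaled(torch.ones(4))   # guard fails -> recompile with the new constant

In other words, every distinct LoRA scale produces its own compiled variant; keeping the scale fixed between calls avoids recompiles.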
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].norm1_context.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[14].norm1_context.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm2_context, 140581769898608) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
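The guard cluster just closed for transformer_blocks[14].norm1_context.linear is the generic shape every PEFT LoRA linear produces: one GuardManager/ID_MATCH per Python-level attribute the adapter's forward reads. A condensed sketch of that forward path, following the peft/tuners/lora/layer.py:557-568 lines the guards cite (DoRA handling, dtype bookkeeping, and multi-adapter details elided; this is an illustration, not PEFT's actual class):

import torch
import torch.nn as nn

class LoraLinearSketch(nn.Module):
    # Each attribute read below corresponds to a guard in the log above.
    def __init__(self, base: nn.Linear, r: int = 16, adapter: str = "default_0"):
        super().__init__()
        self.base_layer = base                                                    # ID_MATCH
        self.lora_A = nn.ModuleDict({adapter: nn.Linear(base.in_features, r, bias=False)})
        self.lora_B = nn.ModuleDict({adapter: nn.Linear(r, base.out_features, bias=False)})
        self.lora_dropout = nn.ModuleDict({adapter: nn.Identity()})
        self.scaling = {adapter: 1.0}        # TYPE_MATCH + DICT_LENGTH + EQUALS_MATCH == 1.0
        self.use_dora = {adapter: False}     # TYPE_MATCH + DICT_LENGTH + ID_MATCH
        self.active_adapters = [adapter]     # the shared active-adapter list

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        result = self.base_layer(x)                       # layer.py:557
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():  # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]          # layer.py:562
            lora_B = self.lora_B[active_adapter]          # layer.py:563
            dropout = self.lora_dropout[active_adapter]   # layer.py:564
            scaling = self.scaling[active_adapter]        # layer.py:565
            x = x.to(lora_A.weight.dtype)                 # layer.py:566
            if not self.use_dora[active_adapter]:         # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Running LoraLinearSketch(nn.Linear(64, 64))(torch.randn(2, 64)) under torch.compile reproduces this attribute-access pattern, which is why each adapted linear contributes a block of guards like the one above.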
| | | | | +- GuardManager: source=L['self'].transformer_blocks[15], accessed_by=GetItemGuardAccessor(15)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15], 140581769896880) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff, accessed_by=DictGetItemGuardAccessor(ff)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff, 140581769900528) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net, 140581769900768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[15].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0], 140581769900720) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].proj, 140533115164400) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].ff.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].proj.lora_A, 140533115172416) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].proj.lora_A['default_0'], 140533115211248) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].proj.lora_A['default_0'].weight, 140537316974944) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].proj.lora_B, 140533115159936) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].proj.lora_B['default_0'], 140533115221424) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].proj.base_layer, 140581769900816) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].proj.lora_dropout, 140533115161520) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].proj.lora_dropout['default_0'], 140533115159648) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].ff.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].ff.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].ff.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].ff.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].ff.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].ff.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[15].ff.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
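The ff.net guards above encode the FeedForward layout Dynamo traced: LENGTH_CHECK len(net) == 3, a GELU projection at net[0] whose approximate mode is pinned by EQUALS_MATCH == 'tanh', a dropout at net[1], and a LoRA-wrapped output projection at net[2]. A condensed sketch of that structure, mirroring the diffusers code paths the guards cite (attention.py:1200, activations.py:83/88); class names here are simplified stand-ins, not the actual diffusers classes:

import torch
import torch.nn as nn
import torch.nn.functional as F

class GELUProjSketch(nn.Module):
    # Projection followed by F.gelu(..., approximate=self.approximate);
    # 'tanh' is the value the EQUALS_MATCH guard above pins.
    def __init__(self, dim_in: int, dim_out: int, approximate: str = "tanh"):
        super().__init__()
        self.proj = nn.Linear(dim_in, dim_out)  # LoRA-wrapped in the guarded model
        self.approximate = approximate

    def forward(self, hidden_states):
        hidden_states = self.proj(hidden_states)                    # activations.py:88
        return F.gelu(hidden_states, approximate=self.approximate)  # activations.py:83

class FeedForwardSketch(nn.Module):
    # len(self.net) == 3, matching the LENGTH_CHECK above.
    def __init__(self, dim: int, mult: int = 4, dropout: float = 0.0):
        super().__init__()
        self.net = nn.ModuleList([
            GELUProjSketch(dim, dim * mult),  # net[0]
            nn.Dropout(dropout),              # net[1]
            nn.Linear(dim * mult, dim),       # net[2], LoRA-wrapped in the guarded model
        ])

    def forward(self, hidden_states):
        for module in self.net:               # attention.py:1200
            hidden_states = module(hidden_states)
        return hidden_states

Because net is an nn.ModuleList iterated in Python, Dynamo guards its identity, concrete type, and length, then descends into each element, which is why net[1] (a plain Dropout in eval mode) contributes only identity/training guards while net[0] and net[2] each carry a full LoRA cluster.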
[__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[1], 140581769900864) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[2], 140533115222000) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].ff.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[2].lora_A, 140533115218400) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[15].ff.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[2].lora_A['default_0'], 140533115210576) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[2].lora_A['default_0'].weight, 140537316973024) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[2].lora_B, 140533115222096) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | 
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[2].lora_B['default_0'], 140533115210384) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[2].base_layer, 140581769900912) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] 
[0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[2].lora_dropout, 140533115219936) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[2].lora_dropout['default_0'], 140533115208944) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].ff.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].ff.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # 
peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].ff.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].ff.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].ff.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].ff.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[15].ff.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn, 140581769899760) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- 
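[editor's note] The guard subtree above walks every attribute PEFT's LoRA forward reads for transformer_blocks[15].ff.net[2]: the base_layer call, the lora_A/lora_B and lora_dropout ModuleDicts, the scaling and use_dora dicts, merged_adapters, and _disable_adapters. A minimal sketch of that forward path, reconstructed from the peft/tuners/lora/layer.py lines cited in the guard comments (illustrative only, not the actual PEFT class; the sizes and the Identity dropout are assumptions chosen so that scaling == 1.0 matches the EQUALS_MATCH guard):

    import torch
    import torch.nn as nn

    class LoraLinearSketch(nn.Module):
        # Mirrors the attributes the guards above pin down: base_layer, lora_A,
        # lora_B, lora_dropout, scaling, use_dora, merged_adapters.
        def __init__(self, base_layer: nn.Linear, r: int = 16, alpha: float = 16.0):
            super().__init__()
            self.base_layer = base_layer
            self.lora_A = nn.ModuleDict({"default_0": nn.Linear(base_layer.in_features, r, bias=False)})
            self.lora_B = nn.ModuleDict({"default_0": nn.Linear(r, base_layer.out_features, bias=False)})
            self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})
            self.scaling = {"default_0": alpha / r}   # EQUALS_MATCH pins this float (1.0 in the log)
            self.use_dora = {"default_0": False}      # TYPE_MATCH/DICT_LENGTH/ID_MATCH pin this dict
            self.merged_adapters = []                 # LENGTH_CHECK asserts this list stays empty
            self.active_adapters = ["default_0"]

        def forward(self, x):
            result = self.base_layer(x)                      # layer.py:557
            for active_adapter in self.active_adapters:
                if active_adapter not in self.lora_A.keys(): # layer.py:560
                    continue
                lora_A = self.lora_A[active_adapter]         # layer.py:562
                lora_B = self.lora_B[active_adapter]         # layer.py:563
                dropout = self.lora_dropout[active_adapter]  # layer.py:564
                scaling = self.scaling[active_adapter]       # layer.py:565
                x = x.to(lora_A.weight.dtype)                # layer.py:566
                if not self.use_dora[active_adapter]:        # layer.py:568
                    result = result + lora_B(lora_A(dropout(x))) * scaling
            return result
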
DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_k, 140533116078016) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_k.lora_A, 140533116084544) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: 
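[editor's note] Several managers in this dump carry the same DICT_CONTAINS guard, not ___dict_contains('forward', module.__dict__). It asserts that no forward has been monkey-patched onto the instance, so the self.forward lookup in nn.Module._call_impl (module.py:1556, quoted in the guard comment) still resolves to the class method that was traced. A tiny illustration of the property being guarded, using a hypothetical standalone module:

    import torch.nn as nn

    lin = nn.Linear(4, 4)
    assert "forward" not in lin.__dict__   # the state the DICT_CONTAINS guard asserts
    lin.forward = lambda x: x * 0          # per-instance override lands in the instance dict
    assert "forward" in lin.__dict__       # the guard would now fail and force a recompile
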
source=L['self'].transformer_blocks[15].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_k.lora_A['default_0'], 140533116076480) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_k.lora_A['default_0'].weight, 140537317159248) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_k.lora_B, 140533116077104) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_k.lora_B['default_0'], 140533116078832) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_k.base_layer, 140581769899904) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_k.lora_dropout, 140533116076576) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_k.lora_dropout['default_0'], 140533116076288) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[15].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | 
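[editor's note] Note what these value guards imply for torch.compile: EQUALS_MATCH bakes the concrete float scaling['default_0'] == 1.0 into the cache entry, and DICT_LENGTH bakes in the adapter count. Changing either at runtime fails the guard and triggers a recompile on the next call. A self-contained stand-in for the guarded state (the dict here is hypothetical, not the live layer):

    scaling = {"default_0": 1.0}        # stand-in for ...attn.to_k.scaling
    assert scaling["default_0"] == 1.0  # the check Dynamo replays before reusing the graph
    scaling["default_0"] = 0.8          # after this, the EQUALS_MATCH guard fails,
                                        # so the next forward recompiles instead of hitting the cache
    assert scaling["default_0"] != 1.0
    # Adding a second adapter would likewise break the DICT_LENGTH == 1 guards above.
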
+- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_q, 140533116472384) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 
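[editor's note] The repeated TENSOR_ALIASING guards all compare against the same anchor, transformer_blocks[0].norm1.linear._active_adapter: Dynamo asserts that every LoRA layer's _active_adapter is the very same object, which makes each check a cheap identity test rather than a per-layer value comparison. The distinction being guarded, in miniature (values are hypothetical):

    anchor = ["default_0"]    # stands in for blocks[0].norm1.linear._active_adapter
    shared = anchor           # every layer references the same object
    assert shared is anchor   # what TENSOR_ALIASING verifies: identity holds
    rebound = list(anchor)    # equal contents, but a different object
    assert rebound == anchor and rebound is not anchor  # this rebinding would fail the guard
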
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_q.lora_A, 140533116473584) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_q.lora_A['default_0'], 140533116088864) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_q.lora_A['default_0'].weight, 140537317148528) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_q.lora_B, 140533116471856) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_q.lora_B['default_0'], 140533116088336) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_q.base_layer, 140581769900000) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_q.lora_dropout, 140533116472672) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_q.lora_dropout['default_0'], 140533116473008) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.scaling, 
accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # 
peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[15].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_v, 140533116091072) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
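[editor's note] The to_q, to_k, and to_v subtrees are three copies of the same LoRA-linear guard pattern, one per projection call in the attention processor (attention_processor.py:1716-1718 in the guard comments). A minimal sketch of those three calls, with plain Linears and hypothetical Flux-like sizes standing in for the LoRA-wrapped layers:

    import torch
    import torch.nn as nn

    dim = 3072                             # assumed hidden size, for illustration only
    to_q, to_k, to_v = (nn.Linear(dim, dim) for _ in range(3))
    hidden_states = torch.randn(1, 512, dim)
    query = to_q(hidden_states)            # attention_processor.py:1716
    key = to_k(hidden_states)              # attention_processor.py:1717
    value = to_v(hidden_states)            # attention_processor.py:1718
    # In the real model each projection is a LoRA wrapper, hence the identical
    # lora_A / lora_B / scaling / use_dora guard subtrees above.
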
source=L['self'].transformer_blocks[15].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_v.lora_A, 140533116088624) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_v.lora_A['default_0'], 140533116080128) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[15].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_v.lora_A['default_0'].weight, 140537317162928) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_v.lora_B, 140533116088000) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_v.lora_B['default_0'], 140533116085888) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[15].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_v.base_layer, 140581769900096) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_v.lora_dropout, 140533116088480) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_v.lora_dropout['default_0'], 140533116089632) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | 
| +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[15].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.norm_k, 140581769899952) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.norm_k.weight, 140581765900192) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.norm_q, 140581769899856) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.norm_q.weight, 140581773243424) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out, 140581769900288) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[0], 140533115079360) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].attn.to_out[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[0].training, 140591004393408) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[0].lora_A, 140533115078784) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[0].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[0].lora_A['default_0'], 140533115173760) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[0].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[0].lora_A['default_0'].weight, 140537316981984) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[0].lora_B, 140533115161424) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[0].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[0].lora_B['default_0'], 140533115162144) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[0].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[0].base_layer, 140581769900336) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[0].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[0].lora_dropout, 140533115079504) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[0].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[0].lora_dropout['default_0'], 140533115078688) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[0].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.to_out[0].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].attn.to_out[0].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].attn.to_out[0].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.to_out[0].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].attn.to_out[0].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[0].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.to_out[0].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[15].attn.to_out[0].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[0]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[1], 140581769900384) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_k_proj, 140533116087424) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].attn.add_k_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_k_proj.training, 140591004393408) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_k_proj.lora_A, 140533116088048) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_k_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_k_proj.lora_A['default_0'], 140533116077152) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_k_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_k_proj.lora_A['default_0'].weight, 140537317153968) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_k_proj.lora_B, 140533116083920) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_k_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_k_proj.lora_B['default_0'], 140533116077392) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_k_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_k_proj.base_layer, 140581769900144) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_k_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_k_proj.lora_dropout, 140533116082000) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_k_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_k_proj.lora_dropout['default_0'], 140533116078736) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_k_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.add_k_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].attn.add_k_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].attn.add_k_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.add_k_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].attn.add_k_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_k_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.add_k_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[15].attn.add_k_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_k_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_q_proj, 140533115077248) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].attn.add_q_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_q_proj.training, 140591004393408) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_q_proj.lora_A, 140533115077392) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_q_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_q_proj.lora_A['default_0'], 140533115077680) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_q_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_q_proj.lora_A['default_0'].weight, 140537316972544) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_q_proj.lora_B, 140533115076912) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_q_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_q_proj.lora_B['default_0'], 140533115078976) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_q_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_q_proj.base_layer, 140581769900240) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_q_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_q_proj.lora_dropout, 140533115087136) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_q_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_q_proj.lora_dropout['default_0'], 140533115087088) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_q_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.add_q_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].attn.add_q_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].attn.add_q_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.add_q_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].attn.add_q_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_q_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.add_q_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[15].attn.add_q_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_q_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_v_proj, 140533116086320) #
encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].attn.add_v_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_v_proj.training, 140591004393408) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_v_proj.lora_A, 140533116077344) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_v_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_v_proj.lora_A['default_0'], 140533115082816) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_v_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_v_proj.lora_A['default_0'].weight, 140537317151328) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_v_proj.lora_B, 140533116085456) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_v_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_v_proj.lora_B['default_0'], 140533115091120) # lora_B = 
self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_v_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_v_proj.base_layer, 140581769900192) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_v_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_v_proj.lora_dropout, 140533116085312) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[15].attn.add_v_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_v_proj.lora_dropout['default_0'], 140533116077200) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_v_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.add_v_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].attn.add_v_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].attn.add_v_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.add_v_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # 
peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].attn.add_v_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_v_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.add_v_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[15].attn.add_v_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_v_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj._active_adapter, 
accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_add_out, 140533115160128) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].attn.to_add_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_add_out.training, 140591004393408) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_add_out.lora_A, 140533115161760) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_add_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_add_out.lora_A['default_0'], 140533115165024) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_add_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_add_out.lora_A['default_0'].weight, 140537316976144) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_add_out.lora_B, 140533115168000) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[15].attn.to_add_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_add_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_add_out.lora_B['default_0'], 140533115171648) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_add_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_add_out.base_layer, 140581769900432) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_add_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_add_out.lora_dropout, 140533115159744) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_add_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_add_out.lora_dropout['default_0'], 140533115162192) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_add_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.to_add_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: 
len(L['self'].transformer_blocks[15].attn.to_add_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].attn.to_add_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.to_add_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].attn.to_add_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_add_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.to_add_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[15].attn.to_add_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | 
| | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_add_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.norm_added_k, 140581769900576) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k 
is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.norm_added_k.weight, 140581772718976) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.norm_added_q, 140581769900480) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if 
torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.norm_added_q.weight, 140581772751824) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].attn.heads == 24 # head_dim = inner_dim // attn.heads # 
diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.processor, 140581769899712) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1, accessed_by=DictGetItemGuardAccessor(norm1)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1, 140581769899280) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.norm, 140581769899424) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.silu, 140581769899328) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.linear, 140533116471760) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].norm1.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.linear.lora_A, 140533116482272) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.linear.lora_A['default_0'], 140533116484912) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.linear.lora_A['default_0'].weight, 140537315218816) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.linear.lora_B, 140533116482464) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.linear.lora_B['default_0'], 140533116485008) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.linear.base_layer, 140581769899376) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.linear.lora_dropout, 140533116474160) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.linear.lora_dropout['default_0'], 140533116473632) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].norm1.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].norm1.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].norm1.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].norm1.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].norm1.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].norm1.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[15].norm1.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
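Every attribute and dict lookup in peft's LoRA forward surfaces above as its own guard: ID_MATCH on lora_A, lora_B and their 'default_0' entries, DICT_LENGTH and EQUALS_MATCH on scaling, ID_MATCH on use_dora, and LENGTH_CHECK on merged_adapters. A minimal sketch of the guarded code path, reconstructed from the peft/tuners/lora/layer.py lines quoted in the guard comments (illustrative, not the library's verbatim source):

    # Sketch of peft's LoRA Linear.forward inner loop; lines 557-568 are the
    # ones the guards above reference. Details of the real code may differ.
    def forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)          # layer.py:557
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():      # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]              # layer.py:562
            lora_B = self.lora_B[active_adapter]              # layer.py:563
            dropout = self.lora_dropout[active_adapter]       # layer.py:564
            scaling = self.scaling[active_adapter]            # layer.py:565
            x = x.to(lora_A.weight.dtype)                     # layer.py:566
            if not self.use_dora[active_adapter]:             # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Each of those Python-level reads is re-validated before every run of the compiled graph, which is why a single LoRA-wrapped Linear contributes well over a dozen guards to this tree; the dump itself is Dynamo's guard-logging artifact (the [__guards] channel, enabled e.g. via TORCH_LOGS="guards").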
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm2, accessed_by=DictGetItemGuardAccessor(norm2)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm2, 140581769900624) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm2.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context, 140581769900960) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net, 140581769901104) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[15].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0], 140581769901056) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].proj, 140533115221952) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].ff_context.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_A, 140533115211440) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_A['default_0'], 140533115222048) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_A['default_0'].weight, 140537316857776) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_B, 140533115218736) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_B['default_0'], 140533115223584) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].proj.base_layer, 140581769901152) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_dropout, 140533115220992) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_dropout['default_0'], 140533115209520) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].ff_context.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].ff_context.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].ff_context.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].ff_context.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].ff_context.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].ff_context.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[15].ff_context.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
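The EQUALS_MATCH on approximate == 'tanh' above pins the tanh-approximated GELU gate in ff_context.net[0]: string attributes read during tracing are guarded by value rather than by object id. A rough sketch of that module, reconstructed from the diffusers/src/diffusers/models/activations.py lines quoted in the guard comments (again illustrative, not the exact library source):

    # Sketch of the GELU gate module guarded above; the quoted lines are
    # activations.py:83 (gelu) and activations.py:88 (forward).
    import torch.nn.functional as F
    from torch import nn

    class GELU(nn.Module):
        def __init__(self, dim_in: int, dim_out: int, approximate: str = "none"):
            super().__init__()
            self.proj = nn.Linear(dim_in, dim_out)
            self.approximate = approximate

        def gelu(self, gate):
            return F.gelu(gate, approximate=self.approximate)  # activations.py:83

        def forward(self, hidden_states):
            hidden_states = self.proj(hidden_states)           # activations.py:88
            return self.gelu(hidden_states)

Because every adapter lookup multiplies guards like these across all transformer blocks, merging the LoRA weights into the base layers before compiling (peft's merge_and_unload(), or fuse_lora() in diffusers) is a common way to shrink the guard tree when the adapter no longer needs to stay hot-swappable.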
140533115212640) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].ff_context.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[2].lora_A, 140533115210192) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[2].lora_A['default_0'], 140533115210528) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | 
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[2].lora_A['default_0'].weight, 140537316852976) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[2].lora_B, 140533115212160) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[2].lora_B['default_0'], 140533115211488) # lora_B = self.lora_B[active_adapter] # 
peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[2].base_layer, 140581769901296) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[2].lora_dropout, 140533115222576) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[2].lora_dropout['default_0'], 140533115222960) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].ff_context.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].ff_context.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].ff_context.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].ff_context.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
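The EQUALS_MATCH on scaling['default_0'] bakes the adapter's scaling factor into the compiled graph as an exact float. In PEFT that value is derived from the adapter config, so a 1.0 here is consistent with lora_alpha == r under standard LoRA scaling (the concrete numbers below are hypothetical):

    # Hypothetical config for the 'default_0' adapter; PEFT computes
    # scaling = lora_alpha / r for standard LoRA (lora_alpha / sqrt(r)
    # when rslora is enabled).
    r, lora_alpha = 16, 16
    scaling = lora_alpha / r   # 1.0, the exact float the guard checks
    # Mutating this dict at runtime (e.g. rescaling the adapter) would
    # fail the EQUALS_MATCH and force a recompile.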
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].ff_context.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].ff_context.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[15].ff_context.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
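Taken together, the lora_A/lora_B/base_layer/lora_dropout/scaling/use_dora guards pin down every object read on the hot path of the PEFT LoRA forward. A condensed sketch, reconstructed from the source lines the guards quote (peft/tuners/lora/layer.py:557-568); this is a paraphrase, not the verbatim PEFT code:

    def lora_linear_forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)      # :557
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():  # :560
                continue
            lora_A = self.lora_A[active_adapter]          # :562
            lora_B = self.lora_B[active_adapter]          # :563
            dropout = self.lora_dropout[active_adapter]   # :564
            scaling = self.scaling[active_adapter]        # :565
            x = x.to(lora_A.weight.dtype)                 # :566
            if not self.use_dora[active_adapter]:         # :568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Every attribute touched in this function shows up above as a dict-lookup guard plus an ID_MATCH on the resulting module, which is why a single LoRA-wrapped Linear contributes well over a dozen guards.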
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context, 140581769899472) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
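The duplicated TENSOR_ALIASING lines assert that this module's _active_adapter is the very same object as transformer_blocks[0].norm1.linear's, so the adapter list is guarded in full once and every other occurrence only needs an aliasing check. The three properties being guarded here, paraphrased from the quoted peft/tuners/tuners_utils.py lines (an illustrative stand-in, not the real PEFT class):

    class BaseTunerLayerSketch:
        def __init__(self):
            self.merged_adapters = []             # guarded empty via LENGTH_CHECK
            self._disable_adapters = False        # guarded via ID_MATCH on False
            self._active_adapter = ["default_0"]  # one list object shared by all layers

        @property
        def merged(self) -> bool:
            return bool(self.merged_adapters)     # :506 -> TYPE_MATCH + LENGTH_CHECK

        @property
        def disable_adapters(self) -> bool:
            return self._disable_adapters         # :511 -> ID_MATCH

        @property
        def active_adapter(self):
            return self._active_adapter           # :516 -> TENSOR_ALIASING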
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.norm, 140581769899664) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.silu, 140581769899568) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.linear, 140533116483808) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].norm1_context.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.linear.lora_A, 140533116483280) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
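The norm1_context guards trace diffusers' AdaLayerNormZero-style conditioning: SiLU into a Linear (itself LoRA-wrapped, per the guards that follow) producing the shift/scale/gate chunks. A self-contained sketch reconstructed from the quoted normalization.py lines; the six-way chunk is the usual AdaLayerNormZero layout and is inferred from the five values unpacked at transformer_flux.py:167, not quoted directly:

    import torch
    import torch.nn as nn

    class AdaLayerNormZeroSketch(nn.Module):
        # Illustrative stand-in for the guarded norm1_context module.
        def __init__(self, embedding_dim):
            super().__init__()
            self.emb = None   # guarded: "if self.emb is not None" (:135)
            self.silu = nn.SiLU()
            self.linear = nn.Linear(embedding_dim, 6 * embedding_dim)
            self.norm = nn.LayerNorm(embedding_dim, elementwise_affine=False)

        def forward(self, x, emb):
            emb = self.linear(self.silu(emb))                 # :137
            shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = emb.chunk(6, dim=1)
            x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # :139
            return x, gate_msa, shift_mlp, scale_mlp, gate_mlp

Note that the ID_MATCH on .emb lets Dynamo specialize away the "if self.emb is not None" branch entirely in the compiled graph.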
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.linear.lora_A['default_0'], 140533116469600) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.linear.lora_A['default_0'].weight, 140537317149008) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.linear.lora_B, 140533116483760) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.linear.lora_B['default_0'], 140533116470752) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.linear.base_layer, 140581769899616) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.linear.lora_dropout, 140533116479632) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.linear.lora_dropout['default_0'], 140533116484288) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].norm1_context.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].norm1_context.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].norm1_context.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].norm1_context.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].norm1_context.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].norm1_context.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[15].norm1_context.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm2_context, 140581769900672) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | +- GuardManager: source=L['self'].transformer_blocks[16], accessed_by=GetItemGuardAccessor(16)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16], 140581769898944) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff, accessed_by=DictGetItemGuardAccessor(ff)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff, 140581769902592) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net, 140581769902832) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[16].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0], 140581769902784) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
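The ff guards describe diffusers' FeedForward block: a three-element ModuleList walked in a plain Python loop, which is why each element is guarded individually. net[0] is the GELU projection (see its .proj and .approximate guards below) and net[2] the LoRA-wrapped output Linear; net[1] only gets a .training guard and is presumably the Dropout in between (an inference, not stated in the log). A minimal sketch of the guarded loop:

    def feed_forward(self, hidden_states):
        # diffusers/src/diffusers/models/attention.py:1200;
        # the LENGTH_CHECK above pins len(self.net) == 3.
        for module in self.net:
            hidden_states = module(hidden_states)
        return hidden_states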
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].proj, 140533115042016) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].ff.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].proj.lora_A, 140533115034048) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].proj.lora_A['default_0'], 140533115040048) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].proj.lora_A['default_0'].weight, 140537316538736) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].proj.lora_B, 140533115043360) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].proj.lora_B['default_0'], 140533115029152) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].proj.base_layer, 140581769902880) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].proj.lora_dropout, 140533115033088) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].proj.lora_dropout['default_0'], 140533115043504) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].ff.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].ff.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].ff.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].ff.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].ff.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].ff.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[16].ff.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[1], 140581769902928) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[2], 140533115040672) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].ff.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | 
| | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[2].lora_A, 140533115027760) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[2].lora_A['default_0'], 140533115040192) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[2].lora_A['default_0'].weight, 140537316533936) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[2].lora_B, 140533115031984) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[2].lora_B['default_0'], 140533115036544) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | 
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[2].base_layer, 140581769902976) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[2].lora_dropout, 140533115033472) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[2].lora_dropout['default_0'], 140533115029056) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].ff.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].ff.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].ff.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].ff.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].ff.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].ff.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[16].ff.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff._backward_hooks, 
accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn, 140581769901824) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_k, 140533114649424) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] 
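
All of the LoRA guards above keep pointing at the same few source lines, peft/tuners/lora/layer.py:557-568. Reassembled from those inline comments, the guarded forward path looks roughly like the sketch below; only the commented lines are quoted in the log, while the loop scaffolding and the final accumulate line are assumptions filled in from the standard LoRA update, not verbatim PEFT source:

    # Sketch of the guarded code path in peft.tuners.lora.layer.Linear.forward,
    # reconstructed from the "# ..." comments in the guard dump above.
    result = self.base_layer(x, *args, **kwargs)         # layer.py:557
    for active_adapter in self.active_adapters:          # assumed scaffolding
        if active_adapter not in self.lora_A.keys():     # layer.py:560
            continue
        lora_A = self.lora_A[active_adapter]             # layer.py:562
        lora_B = self.lora_B[active_adapter]             # layer.py:563
        dropout = self.lora_dropout[active_adapter]      # layer.py:564
        scaling = self.scaling[active_adapter]           # layer.py:565
        x = x.to(lora_A.weight.dtype)                    # layer.py:566
        if not self.use_dora[active_adapter]:            # layer.py:568
            # standard LoRA update (assumed, not quoted in the log)
            result = result + lora_B(lora_A(dropout(x))) * scaling

Dynamo guards on every attribute this code touches, module identities, training flags, the per-adapter dicts, and the scaling value, which is why every LoRA-wrapped Linear in the transformer contributes a guard block like the one above.
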
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn, accessed_by=DictGetItemGuardAccessor(attn)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn, 140581769901824) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_k, 140533114649424) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_k.lora_A, 140533114634688) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_k.lora_A['default_0'], 140533114640064) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_k.lora_A['default_0'].weight, 140537316856416) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_k.lora_B, 140533114649520) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_k.lora_B['default_0'], 140533114647600) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_k.base_layer, 140581769901968) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_k.lora_dropout, 140533114647024) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_k.lora_dropout['default_0'], 140533114640112) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[16].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
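
The guard kinds in this dump all reduce to cheap per-object checks. As a rough Python-level sketch (an assumption for illustration; the real checks are evaluated natively by the GuardManager tree, so these names and signatures are not the torch._dynamo implementation):

    # Illustrative equivalents of the guard kinds above, not real APIs.
    def id_match(obj, expected_id):          # ID_MATCH / ___check_obj_id
        return id(obj) == expected_id
    def type_match(obj, expected_type_id):   # TYPE_MATCH / ___check_type_id
        return id(type(obj)) == expected_type_id
    def equals_match(value, constant):       # EQUALS_MATCH, e.g. scaling == 1.0
        return value == constant
    def dict_length(d, n):                   # DICT_LENGTH
        return len(d) == n
    def length_check_empty(container):       # LENGTH_CHECK in its "not x" form
        return not container
    def aliasing(a, b):                      # TENSOR_ALIASING
        return a is b

The two object ids that recur for every training flag (140591004393440 and 140591004393408) are consistent with the interned False and True singletons, which would mean the frozen base modules report training=False while the injected LoRA submodules were left in train mode, presumably because the adapters were added after the model was put in eval mode.
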
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_q, 140533114635696) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_q.lora_A, 140533114641744) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_q.lora_A['default_0'], 140533114646448) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_q.lora_A['default_0'].weight, 140537316861056) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_q.lora_B, 140533114648272) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_q.lora_B['default_0'], 140533114645104) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_q.base_layer, 140581769902064) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_q.lora_dropout, 140533114650432) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_q.lora_dropout['default_0'], 140533114648080) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[16].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
L['self'].transformer_blocks[16].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_v, 140533117876544) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_v.lora_A, 140533116575584) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[16].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_v.lora_A['default_0'], 140533116577360) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_v.lora_A['default_0'].weight, 140537316670048) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_v.lora_B, 140533116577456) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_v.lora_B['default_0'], 140533116575776) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_v.base_layer, 140581769902160) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_v.lora_dropout, 140533116577600) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_v.lora_dropout['default_0'], 140533116581824) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] 
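
[Annotation] The guard comments in this region keep citing the same few source lines, peft/tuners/lora/layer.py:557-568: that is the PEFT LoRA Linear forward, and every attribute it touches (base_layer, lora_A, lora_B, lora_dropout, scaling, use_dora) gets its own guard so the compiled graph can assume the adapter setup never changes. Below is a minimal sketch of that forward, paraphrased from the cited lines and simplified (dtype restoration and *args/**kwargs plumbing omitted; rank and alpha are assumptions chosen so scaling == 1.0 as the EQUALS_MATCH guards require):

    import torch
    import torch.nn as nn

    class LoraLinearSketch(nn.Module):
        """Sketch of a PEFT-wrapped Linear; attribute names follow the guard tree."""
        def __init__(self, base_layer: nn.Linear, r: int = 16, lora_alpha: int = 16):
            super().__init__()
            self.base_layer = base_layer  # guarded by ID_MATCH
            self.lora_A = nn.ModuleDict({"default_0": nn.Linear(base_layer.in_features, r, bias=False)})
            self.lora_B = nn.ModuleDict({"default_0": nn.Linear(r, base_layer.out_features, bias=False)})
            self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})  # nn.Dropout(p) when p > 0
            self.scaling = {"default_0": lora_alpha / r}  # EQUALS_MATCH pins this to 1.0
            self.use_dora = {"default_0": False}          # ID_MATCH selects the non-DoRA branch
            self.active_adapters = ["default_0"]

        def forward(self, x):
            result = self.base_layer(x)                       # layer.py:557
            for active_adapter in self.active_adapters:
                if active_adapter not in self.lora_A.keys():  # layer.py:560
                    continue
                lora_A = self.lora_A[active_adapter]          # layer.py:562
                lora_B = self.lora_B[active_adapter]          # layer.py:563
                dropout = self.lora_dropout[active_adapter]   # layer.py:564
                scaling = self.scaling[active_adapter]        # layer.py:565
                x = x.to(lora_A.weight.dtype)                 # layer.py:566
                if not self.use_dora[active_adapter]:         # layer.py:568
                    result = result + lora_B(lora_A(dropout(x))) * scaling
            return result

The to_q, to_v, to_out[0] and add_k_proj clusters in this section each repeat this same guard set, one copy per wrapped Linear.
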
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[16].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[16].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.norm_k, 140581769902016) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 
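
[Annotation] The tuners_utils.py guards (lines 506, 511 and 516 per the comments) cover PEFT's adapter bookkeeping rather than the math: merged_adapters must stay an empty list, _disable_adapters must stay False, and the TENSOR_ALIASING pair pins each layer's _active_adapter to the very same list object held by transformer_blocks[0].norm1.linear (despite the name, the check here is plain object identity; each pair being printed twice appears to be a quirk of the dump, not two distinct checks). A sketch of the guarded properties, assuming a list-valued _active_adapter shared across layers:

    class BaseTunerLayerSketch:
        """Adapter bookkeeping guarded above; names follow the log."""
        def __init__(self, shared_active_adapter):
            self.merged_adapters = []                     # LENGTH_CHECK: must stay empty
            self._disable_adapters = False                # ID_MATCH against False
            self._active_adapter = shared_active_adapter  # one list aliased by every layer

        @property
        def merged(self) -> bool:
            return bool(self.merged_adapters)             # tuners_utils.py:506

        @property
        def disable_adapters(self) -> bool:
            return self._disable_adapters                 # tuners_utils.py:511

        @property
        def active_adapter(self):
            return self._active_adapter                   # tuners_utils.py:516

Merging the LoRA into the base weights, or disabling adapters, would flip these checks and force a recompile.
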
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.norm_k.weight, 140581772749424) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.norm_q, 140581769901920) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 
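
[Annotation] norm_q and norm_k are the query/key RMSNorm layers; their guards pin eps == 1e-06 and the identity of the learned weight, citing diffusers/src/diffusers/models/normalization.py:428-430 — the rsqrt line and the weight check are quoted verbatim in the guard comments. A simplified sketch of that forward (the float32 upcast diffusers performs around the variance computation is omitted):

    import torch
    import torch.nn as nn

    class RMSNormSketch(nn.Module):
        def __init__(self, dim: int, eps: float = 1e-6, elementwise_affine: bool = True):
            super().__init__()
            self.eps = eps  # EQUALS_MATCH: eps == 1e-06
            self.weight = nn.Parameter(torch.ones(dim)) if elementwise_affine else None

        def forward(self, hidden_states):
            variance = hidden_states.pow(2).mean(-1, keepdim=True)
            hidden_states = hidden_states * torch.rsqrt(variance + self.eps)  # :428
            if self.weight is not None:                                       # :430
                hidden_states = hidden_states * self.weight
            return hidden_states
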
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.norm_q.weight, 140581772748704) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out, 140581769902352) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[0], 140533114722272) # hidden_states = 
attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].attn.to_out[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[0].training, 140591004393408) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[0].lora_A, 140533114720064) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[0].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[0].lora_A['default_0'], 140533114717952) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | 
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[0].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[0].lora_A['default_0'].weight, 140537316663888) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[0].lora_B, 140533114719824) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[0].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[0].lora_B['default_0'], 140533114718768) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward 
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[0].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[0].base_layer, 140581769902400) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[0].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[0].lora_dropout, 140533114722176) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[0].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[0].lora_dropout['default_0'], 140533114722464) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[0].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.to_out[0].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].attn.to_out[0].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].attn.to_out[0].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.to_out[0].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # 
peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].attn.to_out[0].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[0].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.to_out[0].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[16].attn.to_out[0].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[0]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0]._active_adapter, 
accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[1], accessed_by=GetItemGuardAccessor(1) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[1], 140581769902448) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_k_proj, 140533116578656) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].attn.add_k_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 
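
[Annotation] By this point every guard kind in the dump has appeared. An illustrative Python reading of their semantics (approximate only; the real checks are compiled guard-manager nodes, not this API):

    def ID_MATCH(obj, obj_id):          return id(obj) == obj_id          # exact same object
    def TYPE_MATCH(obj, type_id):       return id(type(obj)) == type_id   # exact type, no subclasses
    def EQUALS_MATCH(obj, value):       return obj == value               # e.g. scaling == 1.0
    def DICT_LENGTH(d, n):              return len(d) == n                # dict kept at n entries
    def LENGTH_CHECK_FALSY(seq):        return not seq                    # merged_adapters stays empty
    def DICT_CONTAINS(d, key, expect):  return (key in d) == expect       # no instance-level 'forward'
    def TENSOR_ALIASING(a, b):          return a is b                     # two sources, one object

Note that two object ids recur under every training, use_dora and _disable_adapters check: 140591004393440 appears wherever the guarded value should be False (use_dora entries, _disable_adapters, the base_layer and norm training flags) and 140591004393408 wherever it should be True (the LoRA submodules' training flags), so these are plausibly id(False) and id(True) — ID_MATCH against the singletons is how Dynamo guards booleans.
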
[Guard tree continued, frame [0/1]. The per-record log prefix "V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards]" repeats on every guard line below and is elided; one guard per line.]
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_k_proj.training, 140591004393408) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_k_proj.lora_A, 140533114767632) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_k_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_k_proj.lora_A['default_0'], 140533114780496) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_k_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_k_proj.lora_A['default_0'].weight, 140537316670768) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_k_proj.lora_B, 140533114767344) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_k_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_k_proj.lora_B['default_0'], 140533114780304) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_k_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_k_proj.base_layer, 140581769902208) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_k_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_k_proj.lora_dropout, 140533116568576) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_k_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_k_proj.lora_dropout['default_0'], 140533116571312) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_k_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.add_k_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].attn.add_k_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].attn.add_k_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.add_k_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].attn.add_k_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_k_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.add_k_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[16].attn.add_k_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_k_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
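The subtree above is one complete set of guards for a single LoRA-wrapped projection (add_k_proj); the same shape repeats below for add_q_proj, add_v_proj, and to_add_out. The quoted source comments (peft/tuners/lora/layer.py:557-568) show exactly which attribute reads produced each guard. Here is a minimal, runnable sketch of that forward path, reconstructed only from those quoted lines; the class name, dimensions, and the final `result + lora_B(lora_A(...)) * scaling` update are illustrative assumptions, not the verbatim PEFT implementation:

```python
import torch
import torch.nn as nn

class LoraLinearSketch(nn.Module):
    """Hypothetical stand-in for PEFT's LoRA Linear wrapper, shaped after the
    attribute reads guarded above. Not the real peft.tuners.lora.layer.Linear."""

    def __init__(self, base_layer: nn.Linear, r: int = 16):
        super().__init__()
        self.base_layer = base_layer                      # ID_MATCH on the module object
        self.lora_A = nn.ModuleDict({"default_0": nn.Linear(base_layer.in_features, r, bias=False)})
        self.lora_B = nn.ModuleDict({"default_0": nn.Linear(r, base_layer.out_features, bias=False)})
        self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})
        self.scaling = {"default_0": 1.0}                 # EQUALS_MATCH pins the float 1.0
        self.use_dora = {"default_0": False}              # ID_MATCH on the False singleton
        self.merged_adapters: list = []                   # LENGTH_CHECK: must stay empty
        self._disable_adapters = False                    # ID_MATCH via disable_adapters
        self._active_adapter = ["default_0"]              # one list shared across layers

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        result = self.base_layer(x)                       # layer.py:557
        for active_adapter in self._active_adapter:       # simplified adapter loop
            if active_adapter not in self.lora_A.keys():  # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]          # layer.py:562
            lora_B = self.lora_B[active_adapter]          # layer.py:563
            dropout = self.lora_dropout[active_adapter]   # layer.py:564
            scaling = self.scaling[active_adapter]        # layer.py:565
            x = x.to(lora_A.weight.dtype)                 # layer.py:566
            if not self.use_dora[active_adapter]:         # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

proj = LoraLinearSketch(nn.Linear(64, 64))
out = proj(torch.randn(1, 77, 64))  # the same path the guards above trace
```

Every attribute touched on this path (base_layer, lora_A['default_0'].weight, scaling['default_0'], use_dora, merged_adapters, ...) becomes a guard, which is why each wrapped projection contributes a few dozen checks to the tree.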
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_q_proj, 140533114728512) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].attn.add_q_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_q_proj.training, 140591004393408) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_q_proj.lora_A, 140533114728128) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_q_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_q_proj.lora_A['default_0'], 140533114719008) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_q_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_q_proj.lora_A['default_0'].weight, 140537316671168) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_q_proj.lora_B, 140533114721360) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_q_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_q_proj.lora_B['default_0'], 140533114723616) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_q_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_q_proj.base_layer, 140581769902304) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_q_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_q_proj.lora_dropout, 140533114728368) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_q_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_q_proj.lora_dropout['default_0'], 140533114722512) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_q_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.add_q_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].attn.add_q_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].attn.add_q_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.add_q_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].attn.add_q_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_q_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.add_q_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[16].attn.add_q_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_q_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
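Before the add_v_proj subtree, which follows the identical pattern, it is worth pinning down what the repeated guard kinds actually test. Judging from the logged expressions themselves, they reduce to identity, type, and value checks against objects recorded at trace time; a rough Python model (readability approximations, not Dynamo's actual helpers):

```python
# Rough model of the guard primitives in this dump. The long integers in the
# log are CPython id() values captured when the frame was traced.
def ID_MATCH(obj, expected_id):          # ___check_obj_id
    return id(obj) == expected_id

def TYPE_MATCH(obj, expected_type_id):   # ___check_type_id
    return id(type(obj)) == expected_type_id

def EQUALS_MATCH(value, expected):       # e.g. scaling['default_0'] == 1.0
    return value == expected

def DICT_LENGTH(d, expected_len):        # e.g. len(...use_dora) == 1
    return len(d) == expected_len

def LENGTH_CHECK_EMPTY(seq):             # e.g. "not ...merged_adapters"
    return not seq

def DICT_CONTAINS_NOT(key, d):           # e.g. no per-instance 'forward' override
    return key not in d

def TENSOR_ALIASING(a, b):               # despite the name, logged here as a plain
    return a is b                        # "is" check on the shared _active_adapter list
```

One consequence visible above: `scaling['default_0']` is guarded by value (EQUALS_MATCH), so changing the LoRA scale between calls invalidates the frame and triggers a fresh compile. A self-contained illustration of that mechanic with a hypothetical toy module, not the Flux transformer:

```python
import torch

class Scaled(torch.nn.Module):
    def __init__(self):
        super().__init__()
        self.scaling = {"default_0": 1.0}

    def forward(self, x):
        # Reading a plain Python float specializes the graph on its value,
        # so Dynamo installs an EQUALS_MATCH guard on it.
        return x * self.scaling["default_0"]

m = torch.compile(Scaled())
x = torch.randn(4)
m(x)                          # first call: compile, guard scaling['default_0'] == 1.0
m.scaling["default_0"] = 0.5  # mutate the guarded value...
m(x)                          # ...the guard fails and the frame recompiles with 0.5
```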
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_v_proj, 140533114779824) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].attn.add_v_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_v_proj.training, 140591004393408) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_v_proj.lora_A, 140533114776656) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_v_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_v_proj.lora_A['default_0'], 140533114728896) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_v_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_v_proj.lora_A['default_0'].weight, 140537316658048) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_v_proj.lora_B, 140533114769264) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_v_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_v_proj.lora_B['default_0'], 140533114728752) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_v_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_v_proj.base_layer, 140581769902256) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_v_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_v_proj.lora_dropout, 140533114769360) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_v_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_v_proj.lora_dropout['default_0'], 140533114780256) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_v_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.add_v_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].attn.add_v_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].attn.add_v_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.add_v_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].attn.add_v_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_v_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.add_v_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[16].attn.add_v_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_v_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
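With add_v_proj done, the last subtree below covers to_add_out. The four quoted diffusers lines locate these modules in the encoder (text) stream of the joint attention processor: three input projections at attention_processor.py:1735-1737 and the output projection at :1779. A skeletal, runnable stand-in for orientation only; the class and dimensions are hypothetical and the actual attention math is omitted:

```python
import torch
import torch.nn as nn

class AddedProjSketch(nn.Module):
    """Toy stand-in for the guarded `attn` module; in the real model each of
    these four Linears is wrapped by the LoRA layer sketched earlier."""
    def __init__(self, dim: int):
        super().__init__()
        self.add_q_proj = nn.Linear(dim, dim)
        self.add_k_proj = nn.Linear(dim, dim)
        self.add_v_proj = nn.Linear(dim, dim)
        self.to_add_out = nn.Linear(dim, dim)

attn = AddedProjSketch(dim=64)
encoder_hidden_states = torch.randn(1, 77, 64)

q = attn.add_q_proj(encoder_hidden_states)  # attention_processor.py:1735
k = attn.add_k_proj(encoder_hidden_states)  # attention_processor.py:1736
v = attn.add_v_proj(encoder_hidden_states)  # attention_processor.py:1737
# ... joint attention over the concatenated text/image streams ...
encoder_hidden_states = attn.to_add_out(encoder_hidden_states)  # attention_processor.py:1779
```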
source=L['self'].transformer_blocks[16].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_add_out.training, 140591004393408) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_add_out.lora_A, 140533114763632) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_add_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_add_out.lora_A['default_0'], 140533115043216) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_add_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_add_out.lora_A['default_0'].weight, 140537316658768) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_add_out.lora_B, 140533114749808) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_add_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_add_out.lora_B['default_0'], 140533115043312) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_add_out.lora_B['default_0'].training, 140591004393408) # lora_B = 
self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_add_out.base_layer, 140581769902496) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_add_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_add_out.lora_dropout, 140533114755424) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_add_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_add_out.lora_dropout['default_0'], 140533114758688) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | 
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_add_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.to_add_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].attn.to_add_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].attn.to_add_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.to_add_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].attn.to_add_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_add_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 
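[editor's note] The dense run of guards above (lora_A/lora_B, lora_dropout, scaling, use_dora) is the per-attribute trail Dynamo leaves while tracing a single PEFT LoRA Linear forward: every attribute the Python code reads becomes one guard. A minimal sketch of that forward, reassembled from the statements the guard comments quote (peft/tuners/lora/layer.py:557-568); the loop scaffolding and the final combination line are assumptions, only the commented statements are verbatim from the log:

    result = self.base_layer(x, *args, **kwargs)         # layer.py:557 -> ID_MATCH on base_layer
    for active_adapter in self.active_adapters:          # assumed loop header
        if active_adapter not in self.lora_A.keys():     # layer.py:560 -> ID_MATCH on lora_A
            continue
        lora_A = self.lora_A[active_adapter]             # layer.py:562
        lora_B = self.lora_B[active_adapter]             # layer.py:563
        dropout = self.lora_dropout[active_adapter]      # layer.py:564
        scaling = self.scaling[active_adapter]           # layer.py:565 -> EQUALS_MATCH: scaling['default_0'] == 1.0
        x = x.to(lora_A.weight.dtype)                    # layer.py:566 -> ID_MATCH on lora_A weight
        if not self.use_dora[active_adapter]:            # layer.py:568 -> ID_MATCH on use_dora['default_0']
            result = result + lora_B(lora_A(dropout(x))) * scaling  # assumed combination step

Note how one dict access like self.scaling[active_adapter] expands into three guards (TYPE_MATCH on the dict, DICT_LENGTH, EQUALS_MATCH on the value): Dynamo specializes on the float 1.0, so changing the LoRA scale would invalidate this cache entry and force a recompile.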
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.to_add_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[16].attn.to_add_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_add_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: 
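[editor's note] The merged / disable_adapters / _active_adapter guards at the end of this block come from three one-line properties on PEFT's BaseTunerLayer, quoted at peft/tuners/tuners_utils.py:506/511/516. A sketch with the quoted bodies; the decorators and signatures are assumptions:

    @property
    def merged(self) -> bool:
        return bool(self.merged_adapters)    # tuners_utils.py:506 -> TYPE_MATCH + LENGTH_CHECK (list must stay empty)

    @property
    def disable_adapters(self) -> bool:
        return self._disable_adapters        # tuners_utils.py:511 -> ID_MATCH

    @property
    def active_adapter(self):
        return self._active_adapter          # tuners_utils.py:516 -> TENSOR_ALIASING

The TENSOR_ALIASING pair asserts that transformer_blocks[16]'s _active_adapter is the very same object as transformer_blocks[0]'s, so Dynamo only has to validate one shared object instead of one per layer; the identical guard appears twice, apparently emitted once for each end of the alias.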
source=L['self'].transformer_blocks[16].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.norm_added_k, 140581769902640) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.norm_added_k.weight, 140581772749264) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | 
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.norm_added_q, 140581769902544) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.norm_added_q.weight, 140581772749344) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 
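[editor's note] For the added-KV RMSNorm modules (norm_added_k / norm_added_q) the only value guards are EQUALS_MATCH on eps == 1e-06 and an ID_MATCH on weight, both pointing at diffusers' normalization.py:428/430. A sketch of that fragment; the variance line and the final cast are assumptions, the two commented lines are the ones quoted above:

    variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)   # assumed
    hidden_states = hidden_states * torch.rsqrt(variance + self.eps)           # normalization.py:428 -> EQUALS_MATCH: eps == 1e-06
    if self.weight is not None:                                                # normalization.py:430 -> ID_MATCH on weight
        hidden_states = hidden_states.to(self.weight.dtype) * self.weight      # assumed

Because eps is a plain Python float read inside forward, it is burned into the compiled graph; an otherwise identical RMSNorm with a different eps would not hit this cache entry.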
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.processor, 140581769901776) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1, 
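[editor's note] The attn.heads and attn.processor guards show two different specialization mechanisms. heads == 24 becomes an EQUALS_MATCH because the integer feeds shape arithmetic (head_dim = inner_dim // attn.heads, attention_processor.py:1721), while the processor gets both a TYPE_MATCH and an ID_MATCH because the attention forward first reflects on it and then calls it. Sketch from the quoted lines; the keyword-argument name on the call is elided in the log and assumed here:

    attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys())  # attention_processor.py:479 -> TYPE_MATCH
    return self.processor(self, hidden_states, **cross_attention_kwargs)                 # attention_processor.py:490 -> ID_MATCH on the call target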
accessed_by=DictGetItemGuardAccessor(norm1) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1, 140581769901344) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.norm, 140581769901488) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 
+ scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.silu, 140581769901392) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.linear, 140533115208800) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].norm1.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 
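[editor's note] norm1 is an adaptive LayerNorm whose modulation tensors come from a LoRA-wrapped linear, which is why its guard subtree immediately recurses into another full lora_A/lora_B block below. The path, rebuilt from the quoted lines (diffusers normalization.py:135/137/139); the chunk into shift/scale/gate values is an assumption:

    if self.emb is not None:                          # :135 -> ID_MATCH on self.emb (presumably pinned to None on this trace)
        emb = self.emb(timestep)                      # assumed signature; branch not taken here
    emb = self.linear(self.silu(emb))                 # :137 -> guards on silu and on the LoRA-wrapped linear
    shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = emb.chunk(6, dim=1)  # assumed
    x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]                      # :139 -> guards on norm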
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.linear.lora_A, 140533114648464) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.linear.lora_A['default_0'], 140533114649088) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.linear.lora_A['default_0'].weight, 140537316866416) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[16].norm1.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.linear.lora_B, 140533114648944) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.linear.lora_B['default_0'], 140533114649136) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.linear.base_layer, 140581769901440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[16].norm1.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.linear.lora_dropout, 140533114634928) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.linear.lora_dropout['default_0'], 140533115221664) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].norm1.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].norm1.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].norm1.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].norm1.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].norm1.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].norm1.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[16].norm1.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm2, accessed_by=DictGetItemGuardAccessor(norm2) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | 
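[editor's note] Two bookkeeping guard families recur on every module in this tree. The empty GuardManager nodes for _forward_hooks / _backward_hooks / _forward_pre_hooks / _backward_pre_hooks record that the hook dicts were read during tracing, and the DICT_CONTAINS guards protect against an instance-level forward override, both citing the one line in nn.Module._call_impl where the callable is resolved:

    # torch/nn/modules/module.py:1556, as quoted throughout the dump:
    forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward)
    # DICT_CONTAINS: not ___dict_contains('forward', <module>.__dict__) guarantees that
    # self.forward still resolves to the class method, not a monkey-patched attribute.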
| | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm2, 140581769902688) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm2.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context, 140581769903024) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net, 140581769903168) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | 
| | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[16].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0], 140581769903120) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].proj, 140533115041584) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].ff_context.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_A, 140533115040480) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_A['default_0'], 140533115624016) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_A['default_0'].weight, 140537316530016) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_B, 140533115039232) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_B['default_0'], 140533115630640) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].proj.base_layer, 140581769903216) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_dropout, 140533115038224) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | 
| +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_dropout['default_0'], 140533115038272) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].ff_context.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].ff_context.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | 
| | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].ff_context.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].ff_context.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].ff_context.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].ff_context.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[16].ff_context.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[1], 
accessed_by=GetItemGuardAccessor(1) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[1], 140581769903312) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[2], 140533115630448) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].ff_context.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[2].lora_A, 140533115630784) # if active_adapter not in self.lora_A.keys(): # 
peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[2].lora_A['default_0'], 140533115632560) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[2].lora_A['default_0'].weight, 140537316535936) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | 
+- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[2].lora_B, 140533115629728) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[2].lora_B['default_0'], 140533115629776) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[2].base_layer, 140581769903360) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[2].lora_dropout, 140533115630496) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[2].lora_dropout['default_0'], 140533115629344) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].ff_context.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].ff_context.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].ff_context.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].ff_context.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].ff_context.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].ff_context.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # 
peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[16].ff_context.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context, 
accessed_by=DictGetItemGuardAccessor(norm1_context) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context, 140581769901536) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.norm, 140581769901728) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[16].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.silu, 140581769901632) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.linear, 140533114650288) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].norm1_context.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.linear.lora_A, 140533114639920) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.linear.lora_A['default_0'], 140533114642896) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[16].norm1_context.linear.lora_A['default_0'].weight, 140537316863936) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.linear.lora_B, 140533114643088) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.linear.lora_B['default_0'], 140533114647264) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.linear.base_layer, 140581769901680) # result = self.base_layer(x, *args, **kwargs) # 
peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.linear.lora_dropout, 140533114650576) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.linear.lora_dropout['default_0'], 140533114650480) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | 
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].norm1_context.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].norm1_context.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].norm1_context.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].norm1_context.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].norm1_context.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear._backward_hooks, 
accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].norm1_context.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[16].norm1_context.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm2_context, 140581769902736) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | +- GuardManager: source=L['self'].transformer_blocks[17], accessed_by=GetItemGuardAccessor(17)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17], 140581769901008) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
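The tree continues below with the remaining guards for transformer_blocks[17]. As a reading aid: each GuardManager node is an attribute or container access Dynamo performed while tracing, and each leaf check (ID_MATCH, TYPE_MATCH, DICT_CONTAINS, DICT_LENGTH, LENGTH_CHECK, EQUALS_MATCH, TENSOR_ALIASING) must pass on every later call before the cached compiled graph is reused; the trailing # comment names the source line whose tracing installed the guard. The two ids that recur in nearly every ID_MATCH on a .training flag, 140591004393440 and 140591004393408, are consistent with the interpreter-wide object ids of False and True (the same 140591004393440 also guards use_dora['default_0'] and _disable_adapters further down, both of which hold False here). A dump of this kind can be reproduced with the registered TORCH_LOGS artifacts; a minimal sketch, assuming PyTorch 2.x:

    # Minimal sketch: reproduce a guard dump like this one (PyTorch 2.x).
    # Equivalent env-var form: TORCH_LOGS="guards,recompiles" python app.py
    import torch

    torch._logging.set_logs(guards=True, recompiles=True)

    @torch.compile
    def f(x):
        return x * 2

    f(torch.randn(4))  # prints the TREE_GUARD_MANAGER for this frame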
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff, accessed_by=DictGetItemGuardAccessor(ff)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff, 140581770183248) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net, 140581770183536) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[17].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0], 140581770183488) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].proj, 140533115825376) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].ff.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].proj.lora_A, 140533115825184) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].proj.lora_A['default_0'], 140533115816256) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].proj.lora_A['default_0'].weight, 140537318313872) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].proj.lora_B, 140533115824320) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].proj.lora_B['default_0'], 140533115817744) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].proj.base_layer, 140581770183584) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].proj.lora_dropout, 140533115827584) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].proj.lora_dropout['default_0'], 140533115825520) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].ff.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].ff.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].ff.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].ff.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].ff.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].ff.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[17].ff.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
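Everything guarded under ff.net[0].proj above is a single PEFT LoRA-wrapped Linear. The source comments attached to the guards identify the lines of peft/tuners/lora/layer.py being specialized; read together they reconstruct the forward path. A sketch of that path, paraphrased from the quoted line numbers (illustrative only, not the verbatim PEFT source; the real implementation also handles merged weights, disabled adapters, and DoRA):

    # Paraphrase of the guarded PEFT LoRA forward path, following the
    # layer.py line numbers cited in the guard comments above.
    def lora_linear_forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)        # layer.py:557
        for active_adapter in self.active_adapters:         # tuners_utils.py:516
            if active_adapter not in self.lora_A.keys():    # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]            # layer.py:562
            lora_B = self.lora_B[active_adapter]            # layer.py:563
            dropout = self.lora_dropout[active_adapter]     # layer.py:564
            scaling = self.scaling[active_adapter]          # layer.py:565
            x = x.to(lora_A.weight.dtype)                   # layer.py:566
            if not self.use_dora[active_adapter]:           # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Every dict lookup in this path (lora_A, lora_B, lora_dropout, scaling, use_dora, all keyed by 'default_0') produces the matching ID_MATCH / TYPE_MATCH / DICT_LENGTH / EQUALS_MATCH leaves above, which is why one adapted Linear contributes roughly thirty guards.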
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[1], 140581770183632) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[2], 140533115820480) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].ff.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[2].lora_A, 140533115822928) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[2].lora_A['default_0'], 140533114803088) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[2].lora_A['default_0'].weight, 140537318324832) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[2].lora_B, 140533115817984) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[2].lora_B['default_0'], 140533114801024) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[2].base_layer, 140581770183680) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[2].lora_dropout, 140533115828832) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[2].lora_dropout['default_0'], 140533115828784) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].ff.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].ff.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].ff.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].ff.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].ff.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].ff.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[17].ff.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
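Two things stand out in the block above. First, the TENSOR_ALIASING leaves: _active_adapter is guarded as being the very same object as transformer_blocks[0].norm1.linear._active_adapter, which means PEFT shares one active-adapter list across all wrapped layers, so a single adapter switch invalidates every one of these guards at once. Second, the volume: the ff sub-module of one block alone contributes on the order of a hundred leaves, and the pattern repeats for every adapted layer, so guard evaluation and recompiles become a measurable cost. A sketch of the usual mitigations, using real torch._dynamo settings (the fuse step assumes a diffusers pipeline with a LoRA already loaded):

    import torch
    import torch._dynamo

    # See which guard failed when a recompile happens, and allow more
    # specializations per frame before Dynamo falls back to eager
    # (cache_size_limit defaults to 8 in this PyTorch generation).
    torch._logging.set_logs(recompiles=True)
    torch._dynamo.config.cache_size_limit = 64

    # Or remove the PEFT indirection from the traced path entirely by
    # folding the LoRA deltas into the base weights before compiling:
    # pipe.fuse_lora()
    # pipe.unload_lora_weights()
    # pipe.transformer = torch.compile(pipe.transformer)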
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn, 140581769903888) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_k, 140533114642080) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_k.lora_A, 140533115290480) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_k.lora_A['default_0'], 140533115304544) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_k.lora_A['default_0'].weight, 140537316391520) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_k.lora_B, 140533115295424) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_k.lora_B['default_0'], 140533115290528) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_k.base_layer, 140581770182720) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_k.lora_dropout, 140533115298064) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_k.lora_dropout['default_0'], 140533115299600) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[17].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_q, 140533115627088) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_q.lora_A, 140533115622384) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 
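The block above is the complete guard subtree for a single PEFT LoRA linear layer (attn.to_k); the same pattern of roughly two dozen guards repeats below for to_q, to_v and to_out[0], and for every other lora.Linear in the model. Dynamo installs one guard per Python attribute the traced forward touched. A minimal sketch of the forward path these guards pin down, paraphrased from the peft/tuners/lora/layer.py lines quoted in the guard comments (the real PEFT code handles merged weights, DoRA and multiple adapters more generally):

    # Sketch only: a paraphrase of peft/tuners/lora/layer.py:557-568 as cited above.
    def lora_linear_forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)      # :557, ID_MATCH on base_layer
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():  # :560, ID_MATCH on the lora_A dict
                continue
            lora_A = self.lora_A[active_adapter]          # :562
            lora_B = self.lora_B[active_adapter]          # :563
            dropout = self.lora_dropout[active_adapter]   # :564
            scaling = self.scaling[active_adapter]        # :565, EQUALS_MATCH against 1.0
            x = x.to(lora_A.weight.dtype)                 # :566, ID_MATCH on the weight object
            if not self.use_dora[active_adapter]:         # :568, ID_MATCH on the bool
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result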
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_q.lora_A['default_0'], 140533116652560) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_q.lora_A['default_0'].weight, 140537316378320) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_q.lora_B, 140533115619792) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_q.lora_B['default_0'], 140533116655776) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_q.base_layer, 140581769904032) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_q.lora_dropout, 140533115623776) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_q.lora_dropout['default_0'], 140533115624304) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[17].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_v, 140533115294752) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
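Two details are worth noting in the subtrees above. First, every ID_MATCH on a .training flag, on use_dora['default_0'] and on _disable_adapters compares against one of the same two object ids (140591004393408 and 140591004393440); these are almost certainly the interned CPython True/False singletons, so the guard degenerates to a pointer comparison. Second, the TENSOR_ALIASING guard on _active_adapter is printed twice per module and, despite its name, here asserts that a plain Python attribute is literally the same object as transformer_blocks[0].norm1.linear._active_adapter, i.e. that all LoRA layers still share one active-adapter list. Roughly what the helper reduces to (a sketch, not the Dynamo source):

    # ___check_obj_id(obj, n) is an identity check, approximately:
    def check_obj_id(obj, expected_id):
        return id(obj) == expected_id

    # Bools are interned singletons in CPython, so id() is stable per process:
    assert check_obj_id(False, id(False))
    assert check_obj_id(True, id(True))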
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_v.lora_A, 140533115303920) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_v.lora_A['default_0'], 140533115302816) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_v.lora_A['default_0'].weight, 140537316384240) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_v.lora_B, 140533115305648) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_v.lora_B['default_0'], 140533115295472) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_v.base_layer, 140581770182816) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_v.lora_dropout, 140533115297056) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_v.lora_dropout['default_0'], 140533115289904) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[17].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.norm_k, 140581769904080) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
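The EQUALS_MATCH guards (scaling['default_0'] == 1.0 above, and norm_k.eps == 1e-06 just below) burn concrete float values into the compiled artifact. PEFT computes scaling as lora_alpha / r in the default, non-rslora configuration, so 1.0 here just means this adapter was exported with lora_alpha == r; anything that mutates these values after compilation, such as applying a different lora_scale or re-weighting adapters, fails the guard and forces a recompile. To see which specific guard failed, guard and recompile logging can be enabled (recent PyTorch 2.x; equivalent to setting TORCH_LOGS="guards,recompiles" in the environment):

    import torch

    # Print the guard tree at compile time and the failing guard on recompiles.
    torch._logging.set_logs(guards=True, recompiles=True)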
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.norm_k.weight, 140581772778192) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.norm_q, 140581769903984) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.norm_q.weight, 140581772741744) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out, 140581770183008) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[0], 140533115825808) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].attn.to_out[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[0].training, 140591004393408) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[0].lora_A, 140533115823984) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[0].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[0].lora_A['default_0'], 140533115819040) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[0].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[0].lora_A['default_0'].weight, 140537318322912) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[0].lora_B, 140533115827872) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[0].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[0].lora_B['default_0'], 140533115816016) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[0].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[0].base_layer, 140581770183056) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
source=L['self'].transformer_blocks[17].attn.to_out[0].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[0].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[0].lora_dropout, 140533115826336) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[0].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[0].lora_dropout['default_0'], 140533115826480) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[0].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].scaling, 
accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.to_out[0].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].attn.to_out[0].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].attn.to_out[0].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.to_out[0].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].attn.to_out[0].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[0].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: 
___check_type_id(L['self'].transformer_blocks[17].attn.to_out[0].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[17].attn.to_out[0].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[0]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[1], accessed_by=GetItemGuardAccessor(1) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[1], 140581770183104) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_k_proj, 140533115298016) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].attn.add_k_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_k_proj.training, 140591004393408) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_k_proj.lora_A, 140533115294656) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[17].attn.add_k_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_k_proj.lora_A['default_0'], 140533115944912) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_k_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_k_proj.lora_A['default_0'].weight, 140537316389920) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_k_proj.lora_B, 140533115941264) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_k_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_k_proj.lora_B['default_0'], 140533115943664) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_k_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_k_proj.base_layer, 140581770182864) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_k_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_k_proj.lora_dropout, 
140533115304304) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_k_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_k_proj.lora_dropout['default_0'], 140533115290720) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_k_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.add_k_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].attn.add_k_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].attn.add_k_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.add_k_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].attn.add_k_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_k_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.add_k_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[17].attn.add_k_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_k_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 
in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_q_proj, 140533115819664) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].attn.add_q_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_q_proj.training, 140591004393408) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_q_proj.lora_A, 140533115819616) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_q_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_q_proj.lora_A['default_0'], 140533115826432) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_q_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_q_proj.lora_A['default_0'].weight, 140537316381440) # x = 
x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_q_proj.lora_B, 140533115819472) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_q_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_q_proj.lora_B['default_0'], 140533115824224) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_q_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_q_proj.base_layer, 140581770182960) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[17].attn.add_q_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_q_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_q_proj.lora_dropout, 140533115819712) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_q_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_q_proj.lora_dropout['default_0'], 140533115814816) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_q_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # 
peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.add_q_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].attn.add_q_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].attn.add_q_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.add_q_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].attn.add_q_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_q_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.merged_adapters, 
accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.add_q_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[17].attn.add_q_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_q_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_v_proj, 140533115932384) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].attn.add_v_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_v_proj.training, 140591004393408) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_v_proj.lora_A, 140533115931472) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_v_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_v_proj.lora_A['default_0'], 140533115819904) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[17].attn.add_v_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_v_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_v_proj.lora_A['default_0'].weight, 140537316382160) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_v_proj.lora_B, 140533115939968) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_v_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_v_proj.lora_B['default_0'], 140533115818224) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_v_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_v_proj.base_layer, 140581770182912) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_v_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_v_proj.lora_dropout, 140533115942800) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_v_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[17].attn.add_v_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_v_proj.lora_dropout['default_0'], 140533115941456) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_v_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.add_v_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].attn.add_v_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].attn.add_v_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.add_v_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].attn.add_v_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_v_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.add_v_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[17].attn.add_v_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_v_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.add_v_proj._active_adapter # return 
self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_add_out, 140533115816688) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].attn.to_add_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_add_out.training, 140591004393408) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_add_out.lora_A, 140533115818032) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | 
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_add_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_add_out.lora_A['default_0'], 140533115829024) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_add_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_add_out.lora_A['default_0'].weight, 140537318326032) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_add_out.lora_B, 140533115824608) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) 
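
[annotation] The guard cluster above repeats, near-verbatim, for every LoRA-wrapped projection in the model (add_v_proj, to_add_out, norm1.linear, ...): Dynamo pins the lora_A/lora_B ModuleDicts and their 'default_0' entries by object identity (ID_MATCH), checks the adapter weight object, asserts scaling['default_0'] == 1.0 (EQUALS_MATCH on a dict it has already TYPE_MATCHed and length-checked), verifies use_dora['default_0'] is off, that merged_adapters is empty, that _disable_adapters is off, and aliases each module's _active_adapter to the one on transformer_blocks[0].norm1.linear. These guards map one-to-one onto the attribute reads in the PEFT LoRA forward that was traced. Below is a minimal sketch of that forward, reconstructed from the source lines the guard comments cite (peft/tuners/lora/layer.py:557-568); the final update line is paraphrased from the standard LoRA formulation and is not quoted anywhere in this log:

    # Non-merged, non-DoRA path of peft.tuners.lora.layer.Linear.forward,
    # reconstructed from the guard comments above -- a sketch, not PEFT source.
    def forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)      # layer.py:557
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():  # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]          # layer.py:562
            lora_B = self.lora_B[active_adapter]          # layer.py:563
            dropout = self.lora_dropout[active_adapter]   # layer.py:564
            scaling = self.scaling[active_adapter]        # layer.py:565
            x = x.to(lora_A.weight.dtype)                 # layer.py:566
            if not self.use_dora[active_adapter]:         # layer.py:568
                # plain LoRA update: result += B(A(dropout(x))) * scaling
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Every dict lookup, .keys() call, and flag read in this function becomes one guard node per layer, which is why the tree grows by an almost identical block for each of the many LoRA layers in the Flux transformer.
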
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_add_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_add_out.lora_B['default_0'], 140533115817408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_add_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_add_out.base_layer, 140581770183152) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_add_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[17].attn.to_add_out.lora_dropout, 140533115827920) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_add_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_add_out.lora_dropout['default_0'], 140533115827680) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_add_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.to_add_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].attn.to_add_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.scaling['default_0'], 
accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].attn.to_add_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.to_add_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].attn.to_add_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_add_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.to_add_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[17].attn.to_add_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[17].attn.to_add_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.norm_added_k, 140581770183296) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_k.eps, 
accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.norm_added_k.weight, 140581772742144) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.norm_added_q, 140581770183200) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_q.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.norm_added_q.weight, 140581772745504) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.processor, 140581769903840) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1, accessed_by=DictGetItemGuardAccessor(norm1) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1, 140581769903408) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.norm, 140581769903552) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.silu, 140581769903456) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[17].norm1.linear, 140533115625216) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].norm1.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.linear.lora_A, 140533115627904) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.linear.lora_A['default_0'], 140533115623200) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | 
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.linear.lora_A['default_0'].weight, 140537316536336) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.linear.lora_B, 140533115629584) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.linear.lora_B['default_0'], 140533115627616) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.linear.base_layer, 140581769903504) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.linear.lora_dropout, 140533115625600) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # 
peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.linear.lora_dropout['default_0'], 140533115625264) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].norm1.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].norm1.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].norm1.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].norm1.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: 
len(L['self'].transformer_blocks[17].norm1.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].norm1.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[17].norm1.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: 
L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm2, accessed_by=DictGetItemGuardAccessor(norm2) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm2, 140581770183344) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm2.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context, 140581770183728) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[17].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net, 140581770183872) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[17].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0], 140581770183824) # for module 
in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].proj, 140533114810576) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].ff_context.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 
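
The guard kinds recurring throughout this dump (ID_MATCH, TYPE_MATCH, EQUALS_MATCH, DICT_LENGTH, LENGTH_CHECK) all reduce to cheap CPython-level checks that Dynamo re-runs on entry to decide whether the compiled graph is still valid. The helpers below are illustrative stand-ins for the real accelerated ones in torch/_dynamo/guards.py (___check_obj_id and friends), not the actual implementations; the two object ids that dominate the `.training` guards here (140591004393440 / 140591004393408) are consistent with id(False) / id(True) in this process, though that is an inference from the log, not stated in it.

```python
# Illustrative stand-ins for the guard predicates in this dump.

def check_obj_id(obj, expected_id):
    # ID_MATCH: the guarded attribute must be the exact same Python object
    # (same id()) as at compile time -- e.g. module instances, True/False.
    return id(obj) == expected_id

def check_type_id(obj, expected_type_id):
    # TYPE_MATCH: same concrete class, compared via the id() of the type --
    # e.g. the `scaling` and `use_dora` dicts above.
    return id(type(obj)) == expected_type_id

def equals_match(value, expected):
    # EQUALS_MATCH: plain value equality -- e.g. scaling['default_0'] == 1.0.
    return value == expected

def dict_length(d, expected_len):
    # DICT_LENGTH: the guarded dict must still hold exactly this many keys.
    return len(d) == expected_len

def length_check_falsy(container):
    # LENGTH_CHECK of the form `not x`: the container must still be empty --
    # e.g. `not ...merged_adapters`.
    return not container
```
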
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_A, 140533114811728) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_A['default_0'], 140533114801696) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_A['default_0'].weight, 140537318320192) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_B, 140533114805968) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_B['default_0'], 140533116318912) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | 
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].proj.base_layer, 140581770183920) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_dropout, 140533114810384) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_dropout['default_0'], 140533114810672) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].ff_context.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].ff_context.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].ff_context.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].ff_context.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].ff_context.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 
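
The peft/tuners/lora/layer.py:557-568 source comments attached to the guards above trace one pass through PEFT's LoRA Linear forward. Below is a minimal sketch of that path under exactly the conditions these guards freeze in place: a single adapter keyed 'default_0', scaling == 1.0, use_dora False, nothing merged, and dropout reduced to identity. `LoraLinearSketch` is a hypothetical stand-in, not PEFT's actual class; the line-number comments map each step back to the guard annotations.

```python
import torch
import torch.nn as nn

class LoraLinearSketch(nn.Module):
    """Simplified stand-in for the peft lora.Linear forward path cited above."""

    def __init__(self, base: nn.Linear, r: int = 8, scaling: float = 1.0):
        super().__init__()
        self.base_layer = base
        self.lora_A = nn.ModuleDict({"default_0": nn.Linear(base.in_features, r, bias=False)})
        self.lora_B = nn.ModuleDict({"default_0": nn.Linear(r, base.out_features, bias=False)})
        self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})
        self.scaling = {"default_0": scaling}   # guarded: DICT_LENGTH == 1, EQUALS_MATCH == 1.0
        self.use_dora = {"default_0": False}    # guarded: ID_MATCH against False
        self.merged_adapters = []               # guarded: LENGTH_CHECK (empty)
        self.active_adapters = ["default_0"]

    def forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)          # layer.py:557
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():      # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]              # layer.py:562
            lora_B = self.lora_B[active_adapter]              # layer.py:563
            dropout = self.lora_dropout[active_adapter]       # layer.py:564
            scaling = self.scaling[active_adapter]            # layer.py:565
            x = x.to(lora_A.weight.dtype)                     # layer.py:566
            if not self.use_dora[active_adapter]:             # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result
```

Because `scaling`, `use_dora`, and `merged_adapters` are read as plain Python state inside this forward, Dynamo has to pin each of them with a guard; changing any of them (for example, merging the adapter) invalidates the compiled graph and forces a recompile.
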
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].ff_context.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[17].ff_context.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000
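
The TENSOR_ALIASING guard above asserts that `_active_adapter` on this LoRA layer is the very same Python object as on transformer_blocks[0].norm1.linear, so Dynamo only has to validate one canonical object and can check every other layer by identity. A toy illustration of how one shared container satisfies such a guard (names hypothetical):

```python
# Toy illustration of the TENSOR_ALIASING guards on _active_adapter:
# if every layer references one shared container, an `is` check against
# the canonical attribute is enough to cover all of them.
shared_active_adapter = ["default_0"]   # hypothetical shared container

class Layer:
    def __init__(self):
        self._active_adapter = shared_active_adapter  # same object everywhere

layers = [Layer() for _ in range(19)]   # say, blocks 0..18
assert all(l._active_adapter is layers[0]._active_adapter for l in layers)
```
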
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[1], 140581770184016) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[2], 140533116309888) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[17].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].ff_context.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[2].lora_A, 140533116313632) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[2].lora_A['default_0'], 140533116313008) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 
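
The LENGTH_CHECK `len(...ff_context.net) == 3` and the EQUALS_MATCH `approximate == 'tanh'` pin down the feed-forward layout these guards are walking: a projection with tanh-approximated GELU at net[0], a dropout at net[1] (which draws only ID_MATCH/training guards, consistent with a parameterless module), and an output linear at net[2], iterated by the loop the log cites at diffusers attention.py:1200. A simplified sketch consistent with the cited source lines; class names are hypothetical, and in the guarded model net[0].proj and net[2] are additionally LoRA-wrapped:

```python
import torch
import torch.nn as nn
import torch.nn.functional as F

class GELUSketch(nn.Module):
    """Stand-in for the projection-plus-GELU module at net[0]."""
    def __init__(self, dim_in, dim_out, approximate="tanh"):
        super().__init__()
        self.proj = nn.Linear(dim_in, dim_out)
        self.approximate = approximate               # guarded: == 'tanh'

    def gelu(self, gate):
        # return F.gelu(gate, approximate=self.approximate)  (activations.py:83)
        return F.gelu(gate, approximate=self.approximate)

    def forward(self, hidden_states):
        hidden_states = self.proj(hidden_states)     # activations.py:88
        return self.gelu(hidden_states)

class FeedForwardSketch(nn.Module):
    """net holds exactly three entries, matching the LENGTH_CHECK above."""
    def __init__(self, dim, inner_dim, dropout=0.0):
        super().__init__()
        self.net = nn.ModuleList([
            GELUSketch(dim, inner_dim),   # net[0]: projection + tanh-GELU
            nn.Dropout(dropout),          # net[1]: parameterless
            nn.Linear(inner_dim, dim),    # net[2]: output projection
        ])

    def forward(self, hidden_states):
        for module in self.net:                      # attention.py:1200
            hidden_states = module(hidden_states)
        return hidden_states
```
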
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[2].lora_A['default_0'].weight, 140537318316192) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[2].lora_B, 140533116319296) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[2].lora_B['default_0'], 140533116313344) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[17].ff_context.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[2].base_layer, 140581770184064) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[2].lora_dropout, 140533116320496) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 
in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[2].lora_dropout['default_0'], 140533116306912) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].ff_context.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].ff_context.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].ff_context.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].ff_context.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | 
| | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].ff_context.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].ff_context.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[17].ff_context.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 
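
The DICT_CONTAINS guards scattered through this tree (`not ___dict_contains('forward', module.__dict__)`) come from the attribute lookup the log cites at nn/modules/module.py:1556: `_call_impl` resolves `self.forward`, and an instance attribute named `forward` would shadow the class method and change what the compiled graph should have traced. A minimal repro of the hazard the guard protects against:

```python
import torch
import torch.nn as nn

# Why Dynamo guards that 'forward' is NOT in the instance __dict__:
# an instance attribute shadows the class method during attribute lookup.
m = nn.SiLU()
assert "forward" not in m.__dict__   # guard holds: class forward is used

m.forward = lambda x: x * 0          # monkey-patch on the instance
assert "forward" in m.__dict__       # guard would now fail -> recompile
print(m(torch.ones(2)))              # tensor([0., 0.]) -- the patched path
```
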
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context, 140581769903600) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[17].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.norm, 140581769903792) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.silu, 140581769903696) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[17].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.linear, 140533115628624) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].norm1_context.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.linear.lora_A, 140533115624928) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.linear.lora_A['default_0'], 140533115628816) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.linear.lora_A['default_0'].weight, 140537316533536) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.linear.lora_B, 140533115629152) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[17].norm1_context.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.linear.lora_B['default_0'], 140533115624736) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.linear.base_layer, 140581769903744) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.linear.lora_dropout, 140533115625552) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.lora_dropout.__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.linear.lora_dropout['default_0'], 140533115628048) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].norm1_context.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].norm1_context.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].norm1_context.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].norm1_context.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].norm1_context.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].norm1_context.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[17].norm1_context.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[17].norm1_context.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm2_context, 140581770183392) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[17].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- GuardManager: source=L['self'].transformer_blocks[18], accessed_by=GetItemGuardAccessor(18) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18], 140581769903072) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff, accessed_by=DictGetItemGuardAccessor(ff) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff, 140581770185360) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net, 140581770185600) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[18].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0], 140581770185552) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].proj, 140533117469504) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].ff.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.lora_A, 
accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].proj.lora_A, 140533117476704) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].proj.lora_A['default_0'], 140533117470272) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].proj.lora_A['default_0'].weight, 140537318018240) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].proj.lora_B, 140533117476560) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].proj.lora_B['default_0'], 140533117478048) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].proj.base_layer, 140581770185648) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[18].ff.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].proj.lora_dropout, 140533117475072) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].proj.lora_dropout['default_0'], 140533117470848) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].proj.lora_dropout['default_0'].training, 
140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].ff.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].ff.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].ff.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].ff.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].ff.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | 
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].ff.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[18].ff.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[18].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[1], 140581770185696) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[2], 140533117470896) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].ff.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] 
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[2].lora_A, 140533117470704) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[2].lora_A['default_0'], 140533117481216) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[2].lora_A['default_0'].weight, 140537318025600) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[2].lora_B, 140533117477760) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[2].lora_B['default_0'], 140533117481504) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] 
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[2].base_layer, 140581770185744) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[2].lora_dropout, 140533117470032) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[2].lora_dropout['default_0'], 140533117470464) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].ff.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].ff.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].ff.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].ff.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].ff.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].ff.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[18].ff.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
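The block above closes out the guard subtree for transformer_blocks[18].ff: every attribute the LoRA-wrapped net[2] linear touches during its forward is pinned by an ID_MATCH/TYPE_MATCH/EQUALS_MATCH guard, each annotated with the peft/tuners/lora/layer.py line that read it. For orientation, here is a minimal Python sketch of that control flow, paraphrased from the source lines quoted in the guards; it is simplified to the single 'default_0' adapter this trace specializes on and is not the verbatim PEFT implementation:

```python
# Sketch of peft/tuners/lora/layer.py:557-568 as quoted in the guard
# annotations above (simplified; the merged/disabled-adapter dispatch and
# the DoRA branch are elided).
def lora_linear_forward(self, x, *args, **kwargs):
    result = self.base_layer(x, *args, **kwargs)      # :557 -> ID_MATCH on base_layer
    for active_adapter in self.active_adapters:       # backed by the shared _active_adapter list
        if active_adapter not in self.lora_A.keys():  # :560 -> ID_MATCH on lora_A (ModuleDict)
            continue
        lora_A = self.lora_A[active_adapter]          # :562 -> ID_MATCH on lora_A['default_0']
        lora_B = self.lora_B[active_adapter]          # :563 -> ID_MATCH on lora_B['default_0']
        dropout = self.lora_dropout[active_adapter]   # :564 -> ID_MATCH on lora_dropout['default_0']
        scaling = self.scaling[active_adapter]        # :565 -> EQUALS_MATCH scaling['default_0'] == 1.0
        x = x.to(lora_A.weight.dtype)                 # :566 -> ID_MATCH on lora_A['default_0'].weight
        if not self.use_dora[active_adapter]:         # :568 -> ID_MATCH on use_dora['default_0']
            # standard LoRA update (completion of the quoted block, paraphrased)
            result = result + lora_B(lora_A(dropout(x))) * scaling
    return result
```

Because every one of these attribute reads becomes a guard, each LoRA-wrapped linear contributes a few dozen checks, and the same pattern repeats for every wrapped module in the tree, as the attn projections below show.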
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn, 140581770184592) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_k, 140533114900672) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_k.lora_A, 140533113851472) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_k.lora_A['default_0'], 140533113848928) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_k.lora_A['default_0'].weight, 140537318177216) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_k.lora_B, 140533113861984) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_k.lora_B['default_0'], 140533113850752) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_k.base_layer, 140581770184736) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_k.lora_dropout, 140533114898896) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_k.lora_dropout['default_0'], 140533114898800) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[18].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_q, 140533114906192) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_q.lora_A, 140533114905760) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_q.lora_A['default_0'], 140533114904608) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_q.lora_A['default_0'].weight, 140537318165136) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_q.lora_B, 140533114899232) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_q.lora_B['default_0'], 140533114903312) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_q.base_layer, 140581770184832) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_q.lora_dropout, 140533114906384) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_q.lora_dropout['default_0'], 140533114906528) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[18].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
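The to_q subtree above repeats the same guard pattern as to_k, down to the shared _active_adapter list that TENSOR_ALIASING ties back to transformer_blocks[0].norm1.linear. Each guard kind named in this dump reduces to a cheap predicate over the recorded source expression; roughly as below (illustrative Python only; Dynamo actually evaluates these through its compiled GuardManager tree, and the integer ids are CPython object addresses from this particular process):

```python
# Rough Python equivalents of the guard kinds appearing in this dump
# (conceptual stand-ins, not Dynamo's real implementation).
def id_match(obj, expected_id):         # ID_MATCH / ___check_obj_id
    return id(obj) == expected_id

def type_match(obj, expected_id):       # TYPE_MATCH / ___check_type_id
    return id(type(obj)) == expected_id

def equals_match(value, expected):      # EQUALS_MATCH, e.g. scaling['default_0'] == 1.0
    return value == expected

def dict_length(d, n):                  # DICT_LENGTH, e.g. len(scaling) == 1
    return len(d) == n

def length_check_empty(seq):            # LENGTH_CHECK, e.g. not merged_adapters
    return not seq

def aliases(a, b):                      # TENSOR_ALIASING-style identity check
    return a is b
```

If any of these predicates fails on a later call (for example, a different adapter is activated or a LoRA weight object is replaced), the cached graph is invalidated and Dynamo recompiles.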
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_v, 140533113860976) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_v.lora_A, 140533113859296) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_v.lora_A['default_0'], 140533113861120) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_v.lora_A['default_0'].weight, 140537318166096) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_v.lora_B, 140533113859776) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_v.lora_B['default_0'], 140533113859824) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_v.base_layer, 140581770184928) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_v.lora_dropout, 140533113850032) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_v.lora_dropout['default_0'], 140533113849792) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[18].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | 
| | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.norm_k, 140581770184784) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.norm_k.weight, 140581772745904) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
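Every guard in the to_v block above maps one-to-one onto an attribute read or dict lookup in PEFT's LoRA Linear forward; the trailing comment of each guard quotes the source line. Pieced together, the quoted lines (peft/tuners/lora/layer.py:557-568) give roughly the following shape. This is a minimal sketch reconstructed from those comments alone: the active_adapters loop variable and the closing lora_B(lora_A(dropout(x))) * scaling update are the standard LoRA formulation, assumed rather than quoted here.

```python
# Minimal sketch of the path guarded above (peft/tuners/lora/layer.py:557-568,
# as quoted in the guard comments). Illustrative only, not the verbatim source.
def lora_forward(self, x, *args, **kwargs):
    result = self.base_layer(x, *args, **kwargs)      # :557  ID_MATCH on base_layer
    for active_adapter in self.active_adapters:       # loop variable assumed
        if active_adapter not in self.lora_A.keys():  # :560  ID_MATCH on the lora_A ModuleDict
            continue
        lora_A = self.lora_A[active_adapter]          # :562  ID_MATCH on lora_A['default_0']
        lora_B = self.lora_B[active_adapter]          # :563  ID_MATCH on lora_B['default_0']
        dropout = self.lora_dropout[active_adapter]   # :564  ID_MATCH on the dropout module
        scaling = self.scaling[active_adapter]        # :565  EQUALS_MATCH: scaling == 1.0
        x = x.to(lora_A.weight.dtype)                 # :566  ID_MATCH on the lora_A weight
        if not self.use_dora[active_adapter]:         # :568  TYPE_MATCH + DICT_LENGTH on use_dora
            result = result + lora_B(lora_A(dropout(x))) * scaling  # assumed standard LoRA update
    return result
```

Because each of these reads becomes its own guard (plus the merged_adapters, disable_adapters, and hook-dict checks from tuners_utils.py:506-516), a single LoRA-wrapped Linear contributes roughly twenty checks, and the tree repeats near-identically for every projection of every transformer block.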
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.norm_q, 140581770184688) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
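By contrast, the norm_k and norm_q managers above are cheap: besides module identity and the training flag, they pin only eps == 1e-06 (EQUALS_MATCH) and the identity of the weight Parameter. The quoted lines are diffusers/src/diffusers/models/normalization.py:428 and 430, i.e. an RMSNorm. A sketch of that forward; the variance line is the usual RMS mean-of-squares and is assumed, since the dump quotes only lines 428 and 430:

```python
import torch

# RMSNorm forward as implied by the norm_q / norm_k guard comments
# (normalization.py:428 and :430). The variance computation is assumed.
def rms_norm(hidden_states: torch.Tensor, weight, eps: float = 1e-6) -> torch.Tensor:
    variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)  # assumed
    hidden_states = hidden_states * torch.rsqrt(variance + eps)               # :428
    if weight is not None:                                                    # :430  ID_MATCH on weight
        hidden_states = hidden_states.to(weight.dtype) * weight               # assumed scale step
    return hidden_states
```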
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.norm_q.weight, 140581772745664) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out, 140581770185120) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[0], 140533113853392) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager:
source=L['self'].transformer_blocks[18].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].attn.to_out[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[0].training, 140591004393408) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[0].lora_A, 140533113858576) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[0].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[0].lora_A['default_0'], 140533113648320) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] 
[0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[0].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[0].lora_A['default_0'].weight, 140537318029360) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[0].lora_B, 140533113861312) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[0].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[0].lora_B['default_0'], 140533113651008) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].lora_B['default_0'].__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[0].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[0].base_layer, 140581770185168) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[0].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[0].lora_dropout, 140533113848304) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[0].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | 
| | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[0].lora_dropout['default_0'], 140533113860688) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[0].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.to_out[0].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].attn.to_out[0].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].attn.to_out[0].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.to_out[0].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].attn.to_out[0].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 
in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[0].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.to_out[0].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[18].attn.to_out[0].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[0]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].transformer_blocks[18].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[1], accessed_by=GetItemGuardAccessor(1) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[1], 140581770185216) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_k_proj, 140533113860208) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
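Read in order, the attention_processor.py line numbers quoted in this stretch of the tree (1727, 1729, 1735-1736, 1776, 1778) outline the processor __call__ that this [0/1] cache entry specializes on. A skeleton under that assumption; to_q/to_k and the attention computation itself do not appear in this excerpt and are elided or assumed:

```python
# Skeleton of the guarded attention path, ordered by the quoted line numbers
# (diffusers attention_processor.py:1727-1778). Names outside the quoted
# lines are assumptions; the attention computation itself is elided.
def flux_attention_sketch(attn, hidden_states, encoder_hidden_states):
    query = attn.to_q(hidden_states)  # assumed, not in this excerpt
    key = attn.to_k(hidden_states)    # assumed, not in this excerpt
    value = attn.to_v(hidden_states)  # the LoRA Linear guarded earlier
    if attn.norm_q is not None:       # :1727
        query = attn.norm_q(query)
    if attn.norm_k is not None:       # :1729
        key = attn.norm_k(key)
    encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states)  # :1735
    encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states)    # :1736
    # ... joint-stream scaled-dot-product attention elided ...
    hidden_states = attn.to_out[0](hidden_states)  # :1776  LoRA Linear
    hidden_states = attn.to_out[1](hidden_states)  # :1778  plain Dropout, hence only two guards
    return hidden_states
```

Note how to_out[1], a plain Dropout with no adapter, costs only two guards, while every LoRA-wrapped projection costs the full block sketched earlier.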
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_k_proj.lora_A, 140533113858288) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_k_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_k_proj.lora_A['default_0'], 140533113853872) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_k_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager:
source=L['self'].transformer_blocks[18].attn.add_k_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_k_proj.lora_A['default_0'].weight, 140537318166576) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_k_proj.lora_B, 140533113853488) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_k_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_k_proj.lora_B['default_0'], 140533113854256) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_k_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_k_proj.base_layer, 140581770184976) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_k_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_k_proj.lora_dropout, 140533113860352) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_k_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_k_proj.lora_dropout['default_0'], 140533113860304) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.lora_dropout['default_0'].training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_k_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.add_k_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].attn.add_k_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].attn.add_k_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.add_k_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].attn.add_k_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_k_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[18].attn.add_k_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.add_k_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[18].attn.add_k_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_k_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_q_proj, 140533113851952) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
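The guard kinds repeating through this dump are mostly constant-time pointer and container checks: ID_MATCH and TYPE_MATCH compare object and type identity, EQUALS_MATCH compares a scalar, DICT_LENGTH, LENGTH_CHECK, and DICT_CONTAINS inspect containers, and TENSOR_ALIASING asserts two sources are the same object. To watch what gets guarded, and why a recompile such as this [0/1] entry happened, without capturing a full verbose dump, torch's artifact logging can be enabled selectively; a minimal sketch, assuming the guards and recompiles artifacts of the torch 2.x logging API:

```python
import torch

# Surface guard trees and recompile reasons selectively (equivalent to
# TORCH_LOGS="guards,recompiles"). Artifact names assumed per torch 2.x.
torch._logging.set_logs(guards=True, recompiles=True)

lin = torch.nn.Linear(4, 4)   # stand-in module; the dump above guards a Flux transformer
compiled = torch.compile(lin)
compiled(torch.randn(2, 4))   # first call compiles and logs the guard tree
```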
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].attn.add_q_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_q_proj.training, 140591004393408) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_q_proj.lora_A, 140533113851760) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_q_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_q_proj.lora_A['default_0'], 140533113849696) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | |
+- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_q_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_q_proj.lora_A['default_0'].weight, 140537318019200) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_q_proj.lora_B, 140533113851328) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_q_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_q_proj.lora_B['default_0'], 140533113850080) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_q_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_q_proj.base_layer, 140581770185072) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_q_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_q_proj.lora_dropout, 140533113852048) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_q_proj.lora_dropout.training, 140591004393408) # dropout = 
self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_q_proj.lora_dropout['default_0'], 140533113851904) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_q_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.add_q_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].attn.add_q_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].attn.add_q_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.add_q_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | 
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].attn.add_q_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_q_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.add_q_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[18].attn.add_q_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_q_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | 
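
The add_q_proj guards above walk, attribute by attribute, the PEFT LoRA forward path whose source lines are quoted in the guard comments (peft/tuners/lora/layer.py:557-568): base_layer identity, the lora_A/lora_B/lora_dropout module dicts and their 'default_0' entries, scaling == 1.0, and use_dora False. A minimal sketch assembled from those quoted lines; the function signature, the loop over active_adapters, and the final residual addition are assumptions filled in for readability, not quoted source:

    import torch

    def lora_linear_forward(layer, x: torch.Tensor) -> torch.Tensor:
        # Reconstruction of the peft LoRA Linear forward that the guards above
        # specialize on; `layer` stands for a peft lora.Linear module such as
        # transformer_blocks[18].attn.add_q_proj.
        result = layer.base_layer(x)                       # layer.py:557 (base_layer ID_MATCH)
        for active_adapter in layer.active_adapters:       # only 'default_0' in this trace
            if active_adapter not in layer.lora_A.keys():  # layer.py:560
                continue
            lora_A = layer.lora_A[active_adapter]          # layer.py:562
            lora_B = layer.lora_B[active_adapter]          # layer.py:563
            dropout = layer.lora_dropout[active_adapter]   # layer.py:564
            scaling = layer.scaling[active_adapter]        # layer.py:565 (EQUALS_MATCH == 1.0)
            x = x.to(lora_A.weight.dtype)                  # layer.py:566 (weight ID_MATCH)
            if not layer.use_dora[active_adapter]:         # layer.py:568 (False in this trace)
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result
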
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_v_proj, 140533113853584) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].attn.add_v_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_v_proj.training, 140591004393408) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_v_proj.lora_A, 140533113854544) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[18].attn.add_v_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_v_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_v_proj.lora_A['default_0'], 140533113852096) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_v_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_v_proj.lora_A['default_0'].weight, 140537318169376) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_v_proj.lora_B, 140533113850320) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_v_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_v_proj.lora_B['default_0'], 140533113852432) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_v_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_v_proj.base_layer, 140581770185024) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_v_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.lora_dropout, 
accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_v_proj.lora_dropout, 140533113853728) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_v_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_v_proj.lora_dropout['default_0'], 140533113854208) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_v_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.add_v_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].attn.add_v_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].attn.add_v_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.add_v_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].attn.add_v_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_v_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.add_v_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[18].attn.add_v_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_v_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_add_out, 140533113649664) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].attn.to_add_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_add_out.training, 140591004393408) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:37:54.938000 140590996850496 
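
The paired TENSOR_ALIASING entries above assert object identity rather than value equality: every LoRA-wrapped module in the model shares one _active_adapter object with transformer_blocks[0].norm1.linear, so each guard reduces to an `is` check against that first module's attribute. A hypothetical illustration (the variable names are invented):

    # One shared _active_adapter object, referenced by every LoRA layer.
    shared_active_adapter = ["default_0"]
    norm1_linear_ref = shared_active_adapter   # transformer_blocks[0].norm1.linear._active_adapter
    add_v_proj_ref = shared_active_adapter     # transformer_blocks[18].attn.add_v_proj._active_adapter
    assert add_v_proj_ref is norm1_linear_ref  # identity, not contents, is what is guarded
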
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_add_out.lora_A, 140533113651152) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_add_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_add_out.lora_A['default_0'], 140533117483520) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_add_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_add_out.lora_A['default_0'].weight, 140537318018640) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_add_out.lora_B, 140533117478816) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_add_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_add_out.lora_B['default_0'], 140533117481840) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_add_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_add_out.base_layer, 
140581770185264) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_add_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_add_out.lora_dropout, 140533113640112) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_add_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_add_out.lora_dropout['default_0'], 140533113649280) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | 
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_add_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.to_add_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].attn.to_add_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].attn.to_add_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.to_add_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].attn.to_add_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_add_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.to_add_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[18].attn.to_add_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_add_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.norm_added_k, 140581770185408) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[18].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.norm_added_k.weight, 140581773242944) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.norm_added_q, 140581770185312) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.norm_added_q.weight, 140581773236704) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
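
norm_added_k and norm_added_q are RMSNorm layers; the guards pin eps (EQUALS_MATCH == 1e-06) and the identity of the weight parameter at the lines quoted from diffusers/src/diffusers/models/normalization.py:428-430. A sketch of that forward, assuming the standard RMSNorm variance computation for the one line the log does not quote:

    import torch

    def rms_norm(hidden_states: torch.Tensor, weight, eps: float = 1e-6) -> torch.Tensor:
        # Lines 428 and 430 are quoted in the guard comments above;
        # the variance line is the standard RMSNorm definition, assumed here.
        variance = hidden_states.pow(2).mean(-1, keepdim=True)       # assumed
        hidden_states = hidden_states * torch.rsqrt(variance + eps)  # normalization.py:428 (eps EQUALS_MATCH)
        if weight is not None:                                       # normalization.py:430 (weight ID_MATCH)
            hidden_states = hidden_states * weight
        return hidden_states
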
[__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.processor, 140581770184544) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1, accessed_by=DictGetItemGuardAccessor(norm1) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1, 140581770184112) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: 
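
Two things are worth noting in the span above. First, the EQUALS_MATCH on attn.heads == 24 shows how a plain Python int read inside the processor (attention_processor.py:1721) is baked into the compiled graph as a constant. Second, the TYPE_MATCH/ID_MATCH pair on attn.processor pins the exact processor instance, so swapping in a different processor object fails the guard and forces a recompile. A sketch of the specialization (inner_dim = 3072 is an assumption consistent with 24 heads, not a value read from the log):

inner_dim, heads = 3072, 24        # heads is guarded; inner_dim is assumed here
head_dim = inner_dim // heads      # attention_processor.py:1721 -> 128, traced as a constant
# the subsequent .view(batch, -1, heads, head_dim) is compiled with these fixed ints
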
source=L['self'].transformer_blocks[18].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.norm, 140581770184256) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.silu, 140581770184160) # emb = 
self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.linear, 140533116316368) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].norm1.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.linear.lora_A, 140533116306624) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 
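
norm1 is the adaLN-Zero modulation block: the guards walk self.emb (pinned by ID_MATCH because normalization.py:135 branches on it being None), self.silu, self.norm, and a LoRA-wrapped self.linear. A minimal sketch of what normalization.py:137-139 computes, with illustrative dimensions rather than anything read from the log:

import torch
import torch.nn as nn

class AdaLayerNormZeroSketch(nn.Module):
    # sketch of the traced modulation, not the diffusers class itself
    def __init__(self, dim):
        super().__init__()
        self.silu = nn.SiLU()
        self.linear = nn.Linear(dim, 6 * dim)   # LoRA-wrapped in the guarded model
        self.norm = nn.LayerNorm(dim, elementwise_affine=False, eps=1e-6)

    def forward(self, x, emb):
        emb = self.linear(self.silu(emb))                                   # normalization.py:137
        shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = emb.chunk(6, dim=1)
        x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]    # normalization.py:139
        return x, gate_msa, shift_mlp, scale_mlp, gate_mlp                  # the 5-tuple transformer_flux.py:165 unpacks
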
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.linear.lora_A['default_0'], 140533116306336) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.linear.lora_A['default_0'].weight, 140537318176416) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.linear.lora_B, 140533116306528) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: 
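
The ID_MATCH on lora_A['default_0'].weight is an identity check (___check_obj_id pins id(obj)), not a value check: updating LoRA weights in place keeps the guard valid, while rebinding the attribute to a fresh Parameter fails it and triggers a recompile. A small standalone illustration of that distinction:

import torch

p0 = torch.nn.Parameter(torch.zeros(3))
pinned = id(p0)                          # what ___check_obj_id effectively records
with torch.no_grad():
    p0.copy_(torch.ones(3))              # in-place update: identity unchanged
assert id(p0) == pinned
p1 = torch.nn.Parameter(torch.ones(3))   # a fresh Parameter is a different object
assert id(p1) != pinned                  # an ID_MATCH pinned to p0 would fail on p1
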
source=L['self'].transformer_blocks[18].norm1.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.linear.lora_B['default_0'], 140533116306240) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.linear.base_layer, 140581770184208) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | 
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.linear.lora_dropout, 140533116315216) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.linear.lora_dropout['default_0'], 140533116315744) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].norm1.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].norm1.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in 
forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].norm1.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].norm1.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].norm1.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].norm1.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[18].norm1.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm2, accessed_by=DictGetItemGuardAccessor(norm2) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm2, 140581770185456) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | 
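
Taken together, the norm1.linear guards re-validate the whole fast path of peft's LoRA Linear.forward (layer.py:557-568): a single 'default_0' adapter, scaling pinned to 1.0 by EQUALS_MATCH, use_dora['default_0'] pinned False, merged_adapters empty, _disable_adapters False. The TENSOR_ALIASING entries additionally assert that this block's _active_adapter is the very object held by transformer_blocks[0], i.e. one adapter list shared model-wide. A condensed, runnable sketch of the guarded branch (a hypothetical class, not peft's source; r=16 and the Identity dropout are assumptions):

import torch
import torch.nn as nn

class LoraLinearSketch(nn.Module):
    def __init__(self, base, r=16):
        super().__init__()
        self.base_layer = base
        self.lora_A = nn.ModuleDict({"default_0": nn.Linear(base.in_features, r, bias=False)})
        self.lora_B = nn.ModuleDict({"default_0": nn.Linear(r, base.out_features, bias=False)})
        self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})
        self.scaling = {"default_0": 1.0}        # EQUALS_MATCH pins this float
        self.use_dora = {"default_0": False}     # ID_MATCH pins this bool
        self.active_adapters = ["default_0"]     # aliased across every wrapped layer

    def forward(self, x):
        result = self.base_layer(x)                                  # layer.py:557
        for name in self.active_adapters:
            if name not in self.lora_A.keys():                       # layer.py:560
                continue
            lora_A, lora_B = self.lora_A[name], self.lora_B[name]    # layer.py:562-563
            dropout, scaling = self.lora_dropout[name], self.scaling[name]
            x = x.to(lora_A.weight.dtype)                            # layer.py:566
            if not self.use_dora[name]:                              # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Every one of those attribute and dict reads becomes its own guard, which is why a LoRA-wrapped Linear accrues roughly twenty guard entries and the dump repeats this pattern for every module in every transformer block.
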
| +- GuardManager: source=L['self'].transformer_blocks[18].norm2.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context, 140581770185792) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net, 140581770185936) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[18].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] 
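
ff_context is a diffusers FeedForward: the TYPE_MATCH plus LENGTH_CHECK fix self.net as a 3-entry ModuleList that attention.py:1200 simply iterates. From the guards that follow, the entries are a GELU projection with a LoRA-wrapped proj, a Dropout, and a final LoRA-wrapped Linear. A sketch under that reading (the 4x hidden multiple is an assumption, not a value in the log):

import torch.nn as nn
import torch.nn.functional as F

class GELUProjSketch(nn.Module):
    # net[0]: Linear projection followed by tanh-approximated GELU
    def __init__(self, dim_in, dim_out, approximate="tanh"):
        super().__init__()
        self.proj = nn.Linear(dim_in, dim_out)   # LoRA-wrapped in the guarded model
        self.approximate = approximate

    def forward(self, hidden_states):
        hidden_states = self.proj(hidden_states)                     # activations.py:88
        return F.gelu(hidden_states, approximate=self.approximate)   # activations.py:83

def feed_forward_net(dim, mult=4):
    inner = dim * mult
    # attention.py:1200 then just runs: for module in net: hidden_states = module(hidden_states)
    return nn.ModuleList([GELUProjSketch(dim, inner), nn.Dropout(0.0), nn.Linear(inner, dim)])
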
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0], 140581770185888) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].proj, 140533117481408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not 
___dict_contains('forward', L['self'].transformer_blocks[18].ff_context.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_A, 140533117478480) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_A['default_0'], 140533117480592) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_A['default_0'].weight, 140537318016480) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_B, 140533117476944) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_B['default_0'], 140533117484816) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].proj.base_layer, 140581770185984) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_dropout, 140533117469312) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_dropout['default_0'], 140533117470320) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].ff_context.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].ff_context.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].ff_context.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | 
| | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].ff_context.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].ff_context.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].ff_context.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[18].ff_context.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[18].ff_context.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[1], 140581770186080) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 
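
The EQUALS_MATCH on approximate == 'tanh' pins the string that selects GELU's tanh formulation, i.e. 0.5 * x * (1 + tanh(sqrt(2/pi) * (x + 0.044715 * x^3))) as documented for torch.nn.functional.gelu. A quick standalone check of that identity:

import math
import torch
import torch.nn.functional as F

x = torch.linspace(-3, 3, 7)
manual = 0.5 * x * (1 + torch.tanh(math.sqrt(2 / math.pi) * (x + 0.044715 * x ** 3)))
assert torch.allclose(F.gelu(x, approximate="tanh"), manual, atol=1e-5)
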
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[2], 140533115118704) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].ff_context.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[2].lora_A, 140533115118656) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[2].lora_A['default_0'], 140533115125424) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[2].lora_A['default_0'].weight, 140537317846736) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[2].lora_B, 140533115124560) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[2].lora_B['default_0'], 140533115118944) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[2].base_layer, 140581770186128) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[2].lora_dropout, 140533115125328) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[2].lora_dropout['default_0'], 140533115112416) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].ff_context.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].ff_context.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].ff_context.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].ff_context.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].ff_context.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].ff_context.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[18].ff_context.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
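[annotation] The cluster that just closed repeats for every LoRA-wrapped Linear in the tree: ID_MATCH on lora_A, lora_B and base_layer, TYPE_MATCH plus DICT_LENGTH on the scaling and use_dora dicts, EQUALS_MATCH on scaling['default_0'] == 1.0, and LENGTH_CHECK on merged_adapters. All of them come from the adapter dispatch in peft/tuners/lora/layer.py quoted in the guard comments. A sketch of that control flow, reconstructed from those fragments (abbreviated, assuming the non-DoRA path; not the verbatim PEFT source):

    def forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)      # layer.py:557
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():  # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]          # layer.py:562
            lora_B = self.lora_B[active_adapter]          # layer.py:563
            dropout = self.lora_dropout[active_adapter]   # layer.py:564
            scaling = self.scaling[active_adapter]        # layer.py:565
            x = x.to(lora_A.weight.dtype)                 # layer.py:566
            if not self.use_dora[active_adapter]:         # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Every dict lookup and boolean on this path is Python-level state, so Dynamo has to pin each one individually, which is why a single adapter fans out into dozens of guards per layer.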
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context, 140581770184304) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.norm, 140581770184496) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.silu, 140581770184400) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.linear, 140533116320976) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].norm1_context.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.linear.lora_A, 140533116313248) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.linear.lora_A['default_0'], 140533114896496) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.linear.lora_A['default_0'].weight, 140537318177056) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.linear.lora_B, 140533116313440) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.linear.lora_B['default_0'], 140533114903648) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.linear.base_layer, 140581770184448) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.linear.lora_dropout, 140533116313392) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.linear.lora_dropout['default_0'], 140533116311808) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].norm1_context.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].norm1_context.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].norm1_context.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].norm1_context.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].norm1_context.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].norm1_context.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[18].norm1_context.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
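[annotation] The TENSOR_ALIASING entries assert an identity relation rather than a value: at trace time, _active_adapter on this layer was the very same Python object as on transformer_blocks[0].norm1.linear, and the cached graph is reusable only while that sharing persists. A hypothetical equivalent check in plain Python (model is an assumed handle to the compiled module; attribute paths taken from the guards above):

    ref = model.transformer_blocks[0].norm1.linear._active_adapter
    # reuse requires the same object, not merely an equal value
    assert model.transformer_blocks[18].norm1_context.linear._active_adapter is ref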
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm2_context, 140581770185504) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | +- GuardManager: source=L['self'].single_transformer_blocks, accessed_by=DictGetItemGuardAccessor(single_transformer_blocks)
| | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks, 140581770185840) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks.training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0], 140581770183776) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn, 140581770186656) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[0].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_k, 140533115348272) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[0].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_k.lora_A, 140533115341648) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_k.lora_A['default_0'], 140533115445184) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_k.lora_A['default_0'].weight, 140537317835696) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_k.lora_B, 140533115339968) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_k.lora_B['default_0'], 140533115443840) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_k.base_layer, 140581770186800) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_k.lora_dropout, 140533115348320) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_k.lora_dropout['default_0'], 140533115347552) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[0].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[0].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[0].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | 
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[0].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[0].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[0].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[0].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[0].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[0].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q, 140533115343040) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[0].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q.lora_A, 140533115348608) # 
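The guard block above covers every Python attribute that PEFT's LoRA dispatch reads for `to_k`; the trailing comments all point at the same few lines of `peft/tuners/lora/layer.py`. As a reading aid, here is a minimal sketch of that guarded hot path, reconstructed from the quoted source lines (paraphrased, not verbatim PEFT code; the final update line is assumed from context):

```python
import torch

def lora_linear_forward(self, x: torch.Tensor, *args, **kwargs) -> torch.Tensor:
    # Sketch of peft/tuners/lora/layer.py:557-568 as cited by the guards above.
    result = self.base_layer(x, *args, **kwargs)        # :557  ID_MATCH on base_layer
    for active_adapter in self.active_adapters:         # TENSOR_ALIASING on _active_adapter
        if active_adapter not in self.lora_A.keys():    # :560  ID_MATCH on lora_A
            continue
        lora_A = self.lora_A[active_adapter]            # :562  ID_MATCH on lora_A['default_0']
        lora_B = self.lora_B[active_adapter]            # :563
        dropout = self.lora_dropout[active_adapter]     # :564
        scaling = self.scaling[active_adapter]          # :565  TYPE_MATCH/DICT_LENGTH/EQUALS_MATCH
        x = x.to(lora_A.weight.dtype)                   # :566  ID_MATCH on the weight object
        if not self.use_dora[active_adapter]:           # :568  ID_MATCH on use_dora['default_0']
            result = result + lora_B(lora_A(dropout(x))) * scaling
    return result
```

Every one of those Python-level reads becomes a guard, which is why a single LoRA linear contributes well over a dozen checks, and the tree below repeats the same pattern for `to_q` and `to_v`.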
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q, 140533115343040) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[0].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q.lora_A, 140533115348608) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q.lora_A['default_0'], 140533115348128) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q.lora_A['default_0'].weight, 140537317843696) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q.lora_B, 140533115341072) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q.lora_B['default_0'], 140533115348512) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q.base_layer, 140581770186896) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q.lora_dropout, 140533115341696) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q.lora_dropout['default_0'], 140533115343568) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[0].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[0].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[0].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[0].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[0].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[0].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[0].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[0].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
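One detail worth noticing in the `to_q` block: every `.training` attribute is guarded with an ID_MATCH against one of only two object ids, 140591004393408 and 140591004393440. Since `___check_obj_id` compares `id()` and `Module.training` is a bool, those can only be the interned `True`/`False` singletons, so flipping any of these submodules between `train()` and `eval()` after compilation fails the guard and forces a recompile. A small self-contained illustration of what the guard effectively checks:

```python
import torch

lin = torch.nn.Linear(8, 8).eval()
recorded = id(lin.training)              # what ID_MATCH snapshots at compile time
assert recorded == id(False)             # bools are singletons, so this is id(False)

lin.train()                              # .training now points at the True singleton
assert id(lin.training) != recorded      # the ID_MATCH guard would fail -> recompile
```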
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_v, 140533115444272) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[0].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_v.lora_A, 140533115449024) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_v.lora_A['default_0'], 140533116535136) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_v.lora_A['default_0'].weight, 140537317694880) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_v.lora_B, 140533115451760) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_v.lora_B['default_0'], 140533116535184) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_v.base_layer, 140581770186944) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_v.lora_dropout, 140533115449264) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_v.lora_dropout['default_0'], 140533115452144) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[0].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[0].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[0].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[0].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[0].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[0].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[0].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[0].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
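The `scaling['default_0'] == 1.0` EQUALS_MATCH, repeated for `to_q`, `to_k`, and `to_v` above, pins the current LoRA scale into the compiled graph as a constant. Anything that rewrites those dict entries therefore invalidates the graph; PEFT's `scale_layer` (which diffusers' `scale_lora_layers()` helper invokes on every `BaseTunerLayer` when a non-default `lora_scale` is passed) is the usual culprit. A hypothetical sketch, with `model` standing in for the LoRA-equipped FLUX transformer:

```python
from peft.tuners.tuners_utils import BaseTunerLayer

def rescale_loras(model, scale: float) -> None:
    # Mirrors what diffusers' scale_lora_layers() does: multiply every
    # adapter's scaling entry in place (here 1.0 -> scale).
    for module in model.modules():
        if isinstance(module, BaseTunerLayer):
            module.scale_layer(scale)

# rescale_loras(transformer, 0.8)
# The next call into the compiled transformer fails the EQUALS_MATCH guards
# above (scaling['default_0'] is now 0.8, not 1.0) and triggers a recompile.
```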
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.norm_k, 140581770186848) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[0].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[0].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.norm_k.weight, 140581765130624) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.norm_q, 140581770186704) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[0].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[0].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.norm_q.weight, 140581765886208) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[0].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[0].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.processor, 140581770186608) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.forward, accessed_by=FuncDefaultsGuardAccessor
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
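The `norm_q`/`norm_k` guards reference diffusers' RMSNorm forward: the `eps == 1e-06` EQUALS_MATCH and the `weight` ID_MATCH come straight from the two quoted lines of `diffusers/src/diffusers/models/normalization.py`. A paraphrased sketch of that path (only lines 428 and 430 are quoted in the log; the variance computation is an assumption about the surrounding code):

```python
import torch

def rms_norm_forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
    # Assumed standard RMSNorm; the two line-numbered statements are the
    # ones the guards above actually cite.
    variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)
    hidden_states = hidden_states * torch.rsqrt(variance + self.eps)    # :428
    if self.weight is not None:                                         # :430
        hidden_states = hidden_states.to(self.weight.dtype) * self.weight
    return hidden_states
```

The same mechanism explains the `attn.heads == 24` EQUALS_MATCH: `head_dim = inner_dim // attn.heads` runs in Python during tracing, so the head count is burned into the compiled graph as a constant.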
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.silu, 140581770186272) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.linear, 140533115123024) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[0].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.linear.lora_A, 140533115115632) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.linear.lora_A['default_0'], 140533115124368) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.linear.lora_A['default_0'].weight, 140537317843936) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.linear.lora_B, 140533115116016) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.linear.lora_B['default_0'], 140533115109920) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.linear.base_layer, 140581770186320) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.linear.lora_dropout, 140533115120336) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.linear.lora_dropout['default_0'], 140533115110928) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[0].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[0].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[0].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[0].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[0].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[0].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[0].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[0].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
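Every lora_A/lora_B/lora_dropout/scaling/use_dora guard in this subtree corresponds to one attribute read in the PEFT LoRA Linear forward that the inline comments quote (peft/tuners/lora/layer.py:557-568). A hedged paraphrase of that path, useful for reading the guards against; this is a sketch of the control flow, not the library's exact implementation:

    import torch

    def lora_linear_forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)      # layer.py:557 -> ID_MATCH on base_layer
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():  # layer.py:560 -> ID_MATCH on lora_A
                continue
            lora_A = self.lora_A[active_adapter]          # layer.py:562 -> ID_MATCH on lora_A['default_0']
            lora_B = self.lora_B[active_adapter]          # layer.py:563 -> ID_MATCH on lora_B['default_0']
            dropout = self.lora_dropout[active_adapter]   # layer.py:564 -> ID_MATCH on lora_dropout['default_0']
            scaling = self.scaling[active_adapter]        # layer.py:565 -> EQUALS_MATCH: scaling['default_0'] == 1.0
            x = x.to(lora_A.weight.dtype)                 # layer.py:566 -> ID_MATCH on lora_A['default_0'].weight
            if not self.use_dora[active_adapter]:         # layer.py:568 -> ID_MATCH on use_dora['default_0']
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Because scaling is traced as a constant, the EQUALS_MATCH above implies that changing the adapter scale after compilation would fail the guard and trigger a recompile.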
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].act_mlp, 140581770186512) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_mlp, 140533115120528) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[0].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_mlp.lora_A, 140533115109632) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_mlp.lora_A['default_0'], 140533115344096) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_mlp.lora_A['default_0'].weight, 140537317840256) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_mlp.lora_B, 140533113629872) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_mlp.lora_B['default_0'], 140533115342656) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_mlp.base_layer, 140581770186464) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_mlp.lora_dropout, 140533115109584) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_mlp.lora_dropout['default_0'], 140533115119616) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[0].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[0].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[0].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[0].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[0].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[0].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[0].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[0].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_out, 140533115344384) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[0].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_out.lora_A, 140533115343760) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_out.lora_A['default_0'], 140533115345344) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_out.lora_A['default_0'].weight, 140537317850096) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_out.lora_B, 140533115345728) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_out.lora_B['default_0'], 140533115346016) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_out.base_layer, 140581770186560) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_out.lora_dropout, 140533115343664) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_out.lora_dropout['default_0'], 140533115344336) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[0].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[0].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[0].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[0].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[0].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[0].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[0].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[0].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
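The TENSOR_ALIASING entries here (on norm.linear, proj_mlp, and proj_out above) are identity guards: every LoRA layer exposes the same `_active_adapter` object as transformer_blocks[0].norm1.linear, the compiled code relies on that sharing, and so Dynamo checks `is`, not `==`. A toy illustration of the distinction, using plain Python objects rather than the actual modules:

    shared = ["default_0"]          # one adapter list shared by every LoRA layer

    class Layer:
        def __init__(self, active):
            self._active_adapter = active

    a = Layer(shared)
    b = Layer(shared)
    assert a._active_adapter is b._active_adapter        # guard holds: same object
    b._active_adapter = list(shared)                     # equal value, different object
    assert a._active_adapter is not b._active_adapter    # an identity guard would now fail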
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1], 140581770186176) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[1].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn, 140581770187424) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[1].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_k, 140533114227296) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[1].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_k.lora_A, 140533114238144) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_k.lora_A['default_0'], 140533114233872) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_k.lora_A['default_0'].weight, 140537317529040) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_k.lora_B, 140533114231664) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_k.lora_B['default_0'], 140533114233968) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_k.base_layer, 140581770187568) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_k.lora_dropout, 140533114227056) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_k.lora_dropout['default_0'], 140533114226816) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[1].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[1].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[1].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[1].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[1].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[1].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[1].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[1].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[1].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
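
Every to_k/to_q/to_v projection produces this identical guard pattern because Dynamo inlines the same peft LoRA Linear.forward and records each attribute it reads. The source comments attached to the guards quote peft/tuners/lora/layer.py:557-568; reassembling them gives roughly the following shape (a paraphrase built only from the lines quoted in the guards, not the verbatim peft source; the loop header and the final accumulation are assumptions):

    def forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)      # :557 -> ID_MATCH on base_layer
        for active_adapter in self.active_adapters:       # assumed loop header
            if active_adapter not in self.lora_A.keys():  # :560 -> ID_MATCH on the lora_A ModuleDict
                continue
            lora_A = self.lora_A[active_adapter]          # :562 -> ID_MATCH on lora_A['default_0']
            lora_B = self.lora_B[active_adapter]          # :563
            dropout = self.lora_dropout[active_adapter]   # :564
            scaling = self.scaling[active_adapter]        # :565 -> TYPE_MATCH/DICT_LENGTH/EQUALS_MATCH
            x = x.to(lora_A.weight.dtype)                 # :566 -> ID_MATCH on the weight Parameter
            if not self.use_dora[active_adapter]:         # :568 -> ID_MATCH on use_dora['default_0']
                result = result + lora_B(lora_A(dropout(x))) * scaling  # assumed accumulation
        return result

The merged / disable_adapters / active_adapter guards (peft/tuners/tuners_utils.py:506, :511, :516) come from the property reads on the same path, and the paired TENSOR_ALIASING checks pin every layer's _active_adapter to the same shared object.
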
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_q, 140533114238528) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[1].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_q.lora_A, 140533114229408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_q.lora_A['default_0'], 140533114227584) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_q.lora_A['default_0'].weight, 140537317697760) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_q.lora_B, 140533114227728) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_q.lora_B['default_0'], 140533114228064) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_q.base_layer, 140581770187664) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_q.lora_dropout, 140533114238288) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_q.lora_dropout['default_0'], 140533114238672) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[1].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[1].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[1].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[1].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[1].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[1].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[1].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[1].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[1].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
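
Note the EQUALS_MATCH guards: the adapter scale is burned into the compiled code as the constant 1.0, and the scaling / use_dora dicts are additionally pinned by TYPE_MATCH and DICT_LENGTH == 1. Changing any of these values (for example, running with a different lora_scale, or loading a second adapter) fails the guards and forces a recompile. A toy demonstration of the same specialization, independent of peft (hypothetical class, for illustration only):

    import torch

    torch._logging.set_logs(recompiles=True)  # same as TORCH_LOGS="recompiles"

    class Scaled(torch.nn.Module):
        def __init__(self):
            super().__init__()
            self.linear = torch.nn.Linear(4, 4)
            self.scaling = {"default_0": 1.0}

        def forward(self, x):
            # The float read here gets an EQUALS_MATCH == 1.0 guard
            return self.linear(x) * self.scaling["default_0"]

    m = torch.compile(Scaled())
    x = torch.randn(2, 4)
    m(x)                           # first compilation
    m.scaling["default_0"] = 0.5   # invalidates the EQUALS_MATCH guard
    m(x)                           # logged as a recompile
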
accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_v, 140533114231904) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[1].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_v.lora_A, 140533114229072) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_v.lora_A['default_0'], 140533115495776) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_v.lora_A['default_0'].weight, 140537317530080) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_v.lora_B, 140533114237952) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_v.lora_B['default_0'], 140533115496016) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_v.base_layer, 140581770187712) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_v.lora_dropout, 140533114233152) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.lora_dropout.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_v.lora_dropout['default_0'], 140533114233488) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[1].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[1].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[1].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | 
| | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[1].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[1].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[1].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[1].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.norm_k, 140581770187616) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[1].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[1].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[1].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.norm_k.weight, 140581765129024) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.norm_q, 140581770187472) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[1].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: 
L['self'].single_transformer_blocks[1].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.norm_q.weight, 140581783344112) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
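Note: the norm_q/norm_k guards above pin down an RMS-style norm. The scalar eps is value-guarded (EQUALS_MATCH ... == 1e-06), while the module and its weight are identity-guarded (ID_MATCH). A sketch of the forward these guards trace, assembled from the quoted lines (diffusers/src/diffusers/models/normalization.py:428 and :430); only the rsqrt line and the weight check come from the log, the mean-of-squares variance and the scaling step are standard RMSNorm and are assumptions here:

    import torch

    def rms_norm(hidden_states, weight, eps=1e-6):
        # mean of squares over the last dim (assumed; not quoted in the log)
        variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + eps)  # normalization.py:428
        if weight is not None:                                       # normalization.py:430
            hidden_states = hidden_states * weight                   # assumed
        return hidden_states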
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[1].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[1].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.processor, 140581770187376) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.forward, accessed_by=FuncDefaultsGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm, accessed_by=DictGetItemGuardAccessor(norm)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm, 140581770187040) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[1].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.norm, 140581770187184) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.silu, 140581770187088) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.linear, 140533116551120) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
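Note: the norm.silu / norm.linear / norm.norm guards above trace the modulated LayerNorm of the single transformer block. A sketch assembled from the quoted lines (diffusers/src/diffusers/models/normalization.py:169 and :171); the 3-way chunk into shift/scale/gate and the linear width are assumptions inferred from the shift_msa/scale_msa names in the log and the gate returned at transformer_flux.py:88:

    import torch.nn as nn

    class AdaNormZeroSingleSketch(nn.Module):
        def __init__(self, dim):
            super().__init__()
            self.silu = nn.SiLU()
            self.linear = nn.Linear(dim, 3 * dim)  # assumed width
            self.norm = nn.LayerNorm(dim, elementwise_affine=False, eps=1e-6)

        def forward(self, x, emb):
            emb = self.linear(self.silu(emb))                     # normalization.py:169
            shift_msa, scale_msa, gate_msa = emb.chunk(3, dim=1)  # assumed split
            x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # normalization.py:171
            return x, gate_msa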
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[1].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.linear.lora_A, 140533116538064) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.linear.lora_A['default_0'], 140533116536240) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[1].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.linear.lora_A['default_0'].weight, 140537317690240) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.linear.lora_B, 140533116537776) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.linear.lora_B['default_0'], 140533116542864) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.linear.base_layer, 140581770187136) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.linear.lora_dropout, 140533116540752) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.linear.lora_dropout.training, 
140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.linear.lora_dropout['default_0'], 140533116547664) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[1].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[1].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[1].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[1].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[1].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[1].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[1].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[1].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[1].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].act_mlp, 140581770187280) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_mlp, 140533114149264) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
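Note: the norm, act_mlp and proj_mlp guards above, together with the proj_out guards further down, quote three lines of the single transformer block's forward (diffusers/src/diffusers/models/transformers/transformer_flux.py:88, :89 and :98). A sketch of how those lines fit together; the attention call, the concatenation and the residual add are assumptions, only the three quoted lines come from the log:

    import torch

    def single_block_forward(self, hidden_states, temb, image_rotary_emb=None):
        residual = hidden_states                                             # assumed
        norm_hidden_states, gate = self.norm(hidden_states, emb=temb)        # transformer_flux.py:88
        mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states))  # transformer_flux.py:89
        attn_output = self.attn(hidden_states=norm_hidden_states,
                                image_rotary_emb=image_rotary_emb)           # assumed
        hidden_states = torch.cat([attn_output, mlp_hidden_states], dim=2)   # assumed
        hidden_states = gate.unsqueeze(1) * self.proj_out(hidden_states)     # transformer_flux.py:98
        return residual + hidden_states                                      # assumed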
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[1].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_mlp.lora_A, 140533113987680) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_mlp.lora_A['default_0'], 140533114228592) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[1].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_mlp.lora_A['default_0'].weight, 140537317696800) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_mlp.lora_B, 140533113988448) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_mlp.lora_B['default_0'], 140533114228544) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[1].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_mlp.base_layer, 140581770187232) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_mlp.lora_dropout, 140533114158336) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- 
GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_mlp.lora_dropout['default_0'], 140533114149408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[1].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[1].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[1].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[1].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[1].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
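Note: unlike the ID_MATCH/TYPE_MATCH identity checks on modules and weights, guards such as DICT_LENGTH ... == 1, EQUALS_MATCH ... == 1.0 and LENGTH_CHECK: not ... merged_adapters are value checks. Loading a second adapter, changing a LoRA scaling value, or merging the adapter into the base weights flips one of them and forces a recompile on the next call. To produce dumps like this one, and to see which specific guard failed when a recompile happens, PyTorch 2.x exposes guard and recompile logging; a minimal sketch (API and env-var spellings as of recent 2.x releases):

    import torch

    # print guard trees like the dump in this log, plus the reason
    # for any recompilation when a guard check fails
    torch._logging.set_logs(guards=True, recompiles=True)
    # equivalently, from the shell:
    #   TORCH_LOGS="guards,recompiles" python your_script.py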
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[1].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[1].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[1].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- 
TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[1].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_out, 140533114228016) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[1].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_out.lora_A, 140533114227680) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_out.lora_A['default_0'], 140533114237568) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_out.lora_A['default_0'].weight, 140537317701440) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_out.lora_B, 140533114228208) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_out.lora_B['default_0'], 140533114235840) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_out.base_layer, 140581770187328) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_out.lora_dropout, 140533114227440) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_out.lora_dropout['default_0'], 140533114228352) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[1].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[1].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[1].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[1].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[1].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[1].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[1].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[1].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[1].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2], accessed_by=GetItemGuardAccessor(2) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2], 140581770186992) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | 
| | | | +- GuardManager: source=L['self'].single_transformer_blocks[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn, 140581770188192) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[2].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_k, 140533113730384) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[2].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_k.training, 140591004393408) 
# key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_k.lora_A, 140533113731392) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_k.lora_A['default_0'], 140533113719392) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[2].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_k.lora_A['default_0'].weight, 140537317528160) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_k.lora_B, 140533113723136) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_k.lora_B['default_0'], 140533113718384) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_k.base_layer, 140581770188336) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_k.lora_dropout, 140533113732640) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_k.lora_dropout['default_0'], 140533113719488) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.lora_dropout['default_0'].training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[2].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[2].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[2].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[2].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[2].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[2].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[2].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[2].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[2].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[2].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_q, 140533113729856) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[2].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_q.lora_A, 140533113725824) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_q.lora_A['default_0'], 140533113718336) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.lora_A['default_0'].__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_q.lora_A['default_0'].weight, 140537317533760) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_q.lora_B, 140533113731488) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_q.lora_B['default_0'], 140533113720256) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[2].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_q.base_layer, 140581770188432) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_q.lora_dropout, 140533113731584) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_q.lora_dropout['default_0'], 140533113725584) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[2].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[2].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[2].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[2].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[2].attn.to_q.use_dora) == 1 # if not 
self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[2].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[2].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].single_transformer_blocks[2].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[2].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_v, 140533113731344) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[2].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_v.lora_A, 140533113726976) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_v.lora_A['default_0'], 140533113682480) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_v.lora_A['default_0'].weight, 140537317328432) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_v.lora_B, 140533113675952) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_v.lora_B['default_0'], 140533113671872) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_v.base_layer, 140581770188480) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
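The guards above retrace, attribute by attribute, the LoRA Linear forward that Dynamo specialized on. A condensed sketch of that path, reconstructed from the peft/tuners/lora/layer.py lines the guards cite (:557-:568); simplified, not the verbatim peft implementation:

    # Sketch of peft's LoRA Linear.forward as cited by the guards above.
    def lora_linear_forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)      # :557, ID_MATCH on base_layer
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():  # :560, ID_MATCH on the lora_A ModuleDict
                continue
            lora_A = self.lora_A[active_adapter]          # :562
            lora_B = self.lora_B[active_adapter]          # :563
            dropout = self.lora_dropout[active_adapter]   # :564
            scaling = self.scaling[active_adapter]        # :565, EQUALS_MATCH on the float
            x = x.to(lora_A.weight.dtype)                 # :566, guard on lora_A's weight
            if not self.use_dora[active_adapter]:         # :568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result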
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_v.lora_dropout, 140533113719872) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_v.lora_dropout['default_0'], 140533113719776) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[2].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[2].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[2].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[2].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[2].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[2].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[2].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
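Note that scaling['default_0'] is guarded with EQUALS_MATCH rather than an identity check: the Python float is baked into the compiled graph as a constant, so changing it invalidates the cache entry and forces a recompile. A minimal, self-contained repro of that behavior (the Toy module is hypothetical, not from this codebase):

    import torch

    class Toy(torch.nn.Module):
        def __init__(self):
            super().__init__()
            self.scaling = {"default_0": 1.0}  # plain Python floats, like LoRA's self.scaling
            self.lin = torch.nn.Linear(8, 8)

        def forward(self, x):
            # The float is inlined as a constant, so Dynamo guards on its value
            # (EQUALS_MATCH) plus the dict's type and length, as seen above.
            return self.lin(x) * self.scaling["default_0"]

    m = Toy()
    fn = torch.compile(m)
    fn(torch.randn(2, 8))            # first compile: guards scaling['default_0'] == 1.0
    m.scaling["default_0"] = 0.5
    fn(torch.randn(2, 8))            # guard fails, Dynamo recompiles with the new constant

When guard churn like this triggers repeated recompiles, the standard torch 2.x knobs are the logging artifacts and the Dynamo cache limit; treat the exact spellings as assumptions for your version:

    # Equivalent to TORCH_LOGS="guards,recompiles" on the command line.
    torch._logging.set_logs(guards=True, recompiles=True)
    # Caps how many compiled versions Dynamo keeps per code object.
    torch._dynamo.config.cache_size_limit = 16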
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[2].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[2].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.norm_k, 140581770188384) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[2].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[2].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.norm_k.weight, 140581772721376) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.norm_q, 140581770188240) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[2].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[2].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.norm_q.weight, 140581765987072) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[2].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
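The eps == 1e-06 guards on norm_q/norm_k come from the RMSNorm forward cited above. A sketch of that computation, based only on the cited lines (diffusers/src/diffusers/models/normalization.py:428-430); approximate, not the verbatim source:

    import torch

    def rms_norm(hidden_states, weight, eps=1e-6):
        variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + eps)  # :428, EQUALS_MATCH on eps
        if weight is not None:                                       # :430, ID_MATCH on weight
            hidden_states = hidden_states * weight
        return hidden_states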
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[2].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.processor, 140581770188144) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.forward, accessed_by=FuncDefaultsGuardAccessor
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm, 140581770187808) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
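Taken together, the attn guards retrace the Flux attention processor's __call__ up to the projections and QK norms. A rough sketch of the cited path (attention_processor.py:1713-1729), condensed and approximate rather than the verbatim source:

    def flux_attention(attn, hidden_states, encoder_hidden_states=None):
        batch_size, _, _ = (
            hidden_states.shape if encoder_hidden_states is None
            else encoder_hidden_states.shape         # :1713, the None default is what gets guarded
        )
        query = attn.to_q(hidden_states)
        key = attn.to_k(hidden_states)
        value = attn.to_v(hidden_states)             # :1718, ID_MATCH on to_v
        inner_dim = key.shape[-1]
        head_dim = inner_dim // attn.heads           # :1721, EQUALS_MATCH heads == 24
        if attn.norm_q is not None:                  # :1727
            query = attn.norm_q(query)
        if attn.norm_k is not None:                  # :1729
            key = attn.norm_k(key)
        return query, key, value, head_dim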
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[2].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.norm, 140581770187952) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.silu, 140581770187856) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.linear, 140533115488672) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[2].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.linear.lora_A, 140533115580624) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
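The norm guards walk the adaLN-Zero-style modulation of the single transformer block: transformer_flux.py:88 calls self.norm(hidden_states, emb=temb), whose forward is the one cited at normalization.py:169/:171. A condensed sketch of that modulation; the 3-way chunk is an assumption inferred from the two returned values, not taken verbatim from the source:

    def ada_layer_norm_zero_single(x, emb, silu, linear, norm):
        emb = linear(silu(emb))                  # :169, linear is itself a LoRA-wrapped layer here
        shift_msa, scale_msa, gate_msa = emb.chunk(3, dim=1)  # chunk arity assumed
        x = norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # :171
        return x, gate_msa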
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.linear.lora_A['default_0'], 140533115578512) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.linear.lora_A['default_0'].weight, 140537317530640) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.linear.lora_B, 140533115581872) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.linear.lora_B['default_0'], 140533115569632) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.linear.base_layer, 140581770187904) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.linear.lora_dropout, 140533115501104) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.linear.lora_dropout['default_0'], 140533115488912) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[2].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[2].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[2].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[2].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[2].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[2].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[2].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[2].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[2].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].act_mlp, 140581770188048) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
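The repeated TENSOR_ALIASING pairs record that every LoRA layer's _active_adapter refers to the same object as transformer_blocks[0].norm1.linear's, so one identity check per layer replaces re-validating the contents each time. A toy illustration of the property being asserted (the Layer class is hypothetical, not peft's):

    import torch

    class Layer(torch.nn.Module):
        def __init__(self, shared_adapters):
            super().__init__()
            self._active_adapter = shared_adapters  # every layer references the same list

    shared = ["default_0"]
    layers = [Layer(shared) for _ in range(3)]
    # This identity is what the TENSOR_ALIASING guards assert:
    assert layers[0]._active_adapter is layers[2]._active_adapter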
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_mlp, 140533115579280) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[2].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_mlp.lora_A, 140533115427696) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_mlp.lora_A['default_0'], 140533115434464) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_mlp.lora_A['default_0'].weight, 140537317529520) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_mlp.lora_B, 140533115427744) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_mlp.lora_B['default_0'], 140533115422848) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_mlp.base_layer, 140581770188000) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_mlp.lora_dropout, 140533115581248) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_mlp.lora_dropout['default_0'], 140533115573280) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[2].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[2].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[2].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager:
source=L['self'].single_transformer_blocks[2].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[2].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[2].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[2].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[2].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[2].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[2].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[2].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_out, 140533116657360) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[2].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_out.lora_A, 140533113719248) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_out.lora_A['default_0'], 140533113722752) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_out.lora_A['default_0'].weight, 140537317527120) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_out.lora_B, 140533113730240) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_out.lora_B['default_0'], 140533113724096) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_out.base_layer, 140581770188096) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_out.lora_dropout, 140533113723088) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_out.lora_dropout['default_0'], 140533113726448) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[2].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[2].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # 
peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[2].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[2].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[2].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[2].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[2].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[2].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[2].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3], accessed_by=GetItemGuardAccessor(3) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3], 140581770187760) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', 
L['self'].single_transformer_blocks[3].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn, 140581770188960) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[3].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_k, 140533114503664) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[3].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_k.lora_A, 140533114518112) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_k.lora_A['default_0'], 140533114518640) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[3].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_k.lora_A['default_0'].weight, 140537317330592) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_k.lora_B, 140533114509712) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_k.lora_B['default_0'], 140533114518544) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_k.base_layer, 140581770189104) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_k.lora_dropout, 140533114511392) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[3].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_k.lora_dropout['default_0'], 140533114518448) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[3].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[3].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[3].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[3].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[3].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[3].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[3].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[3].attn.to_k._active_adapter # return 
self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[3].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_q, 140533113596048) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[3].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_q.lora_A, 140533114333232) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_q.lora_A['default_0'], 140533114516240) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_q.lora_A['default_0'].weight, 140537317334192) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_q.lora_B, 140533114517728) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_q.lora_B['default_0'], 140533114512256) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_q.base_layer, 140581770189200) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_q.lora_dropout, 140533114333088) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_q.lora_dropout['default_0'], 140533114334144) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[3].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[3].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[3].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[3].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[3].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[3].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[3].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[3].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[3].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_v, 140533114517392) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[3].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_v.lora_A, 140533114508176) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_v.lora_A['default_0'], 140533114507840) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_v.lora_A['default_0'].weight, 140537319298832) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_v.lora_B, 140533114518880) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_v.lora_B['default_0'], 140533114507120) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_v.base_layer, 140581770189248) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_v.lora_dropout, 140533114519456) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_v.lora_dropout['default_0'], 140533114519360) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[3].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[3].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[3].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[3].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[3].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[3].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[3].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[3].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[3].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.norm_k, 140581770189152) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[3].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[3].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.norm_k.weight, 140581772786432) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.norm_q, 140581770189008) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[3].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[3].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.norm_q.weight, 140581772783632) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[3].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[3].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.processor, 140581770188912) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.forward, accessed_by=FuncDefaultsGuardAccessor
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm, 140581770188576) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[3].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.norm, 140581770188720) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.silu, 140581770188624) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.linear, 140533113668080) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[3].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.linear.lora_A, 140533113672064) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.linear.lora_A['default_0'], 140533113680704) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.linear.lora_A['default_0'].weight, 140537317337632) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.linear.lora_B, 140533113675664) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.linear.lora_B['default_0'], 140533113675568) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.linear.base_layer, 140581770188672) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.linear.lora_dropout, 140533113672976) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.linear.lora_dropout['default_0'], 140533113681616) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[3].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[3].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[3].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[3].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[3].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[3].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[3].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[3].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[3].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].act_mlp, 140581770188816) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_mlp, 140533115434512) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[3].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_mlp.lora_A, 140533114276688) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_mlp.lora_A['default_0'], 140533114275968) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_mlp.lora_A['default_0'].weight, 140537317334432) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.lora_B, 
accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_mlp.lora_B, 140533114276256) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_mlp.lora_B['default_0'], 140533114275824) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_mlp.base_layer, 140581770188768) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.base_layer.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_mlp.lora_dropout, 140533114289744) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_mlp.lora_dropout['default_0'], 140533114274240) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: 
___check_type_id(L['self'].single_transformer_blocks[3].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[3].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[3].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[3].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[3].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[3].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not 
L['self'].single_transformer_blocks[3].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[3].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[3].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_out, 140533114275392) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[3].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_out.lora_A, 140533114274528) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_out.lora_A['default_0'], 140533113598208) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_out.lora_A['default_0'].weight, 140537317340032) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_out.lora_B, 140533114283120) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_out.lora_B['default_0'], 140533113597920) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_out.base_layer, 140581770188864) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_out.lora_dropout, 140533114275440) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_out.lora_dropout['default_0'], 140533114275584) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[3].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[3].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[3].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[3].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[3].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[3].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[3].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[3].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[3].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4], accessed_by=GetItemGuardAccessor(4) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4], 140581770188528) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[4].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn, 140581770189728) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[4].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.training, 
140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_k, 140533113769744) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[4].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_k.lora_A, 140533113768016) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_k.lora_A.training, 140591004393408) 
# if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_k.lora_A['default_0'], 140533113770656) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_k.lora_A['default_0'].weight, 140537319293952) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_k.lora_B, 140533113769648) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_k.lora_B['default_0'], 140533113770272) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_k.base_layer, 140581770189872) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_k.lora_dropout, 140533113773056) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in 
forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_k.lora_dropout['default_0'], 140533113770608) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[4].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[4].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: 
L['self'].single_transformer_blocks[4].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[4].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[4].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[4].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[4].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[4].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[4].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_q, 140533113777952) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[4].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.lora_A, 
accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_q.lora_A, 140533113772960) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_q.lora_A['default_0'], 140533113773008) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_q.lora_A['default_0'].weight, 140537319301712) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | 
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_q.lora_B, 140533113781264) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_q.lora_B['default_0'], 140533113775360) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_q.base_layer, 140581770189968) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_q.lora_dropout, 140533113778144) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_q.lora_dropout['default_0'], 140533113777280) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[4].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[4].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[4].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[4].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[4].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[4].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: 
___check_type_id(L['self'].single_transformer_blocks[4].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[4].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[4].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[4].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_v, 140533113774784) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[4].attn.to_v.__dict__) # forward_call = (self._slow_forward if 
torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_v.lora_A, 140533113773536) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_v.lora_A['default_0'], 140533113780688) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_v.lora_A['default_0'].weight, 140537319151536) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_v.lora_B, 140533113771232) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_v.lora_B['default_0'], 140533113780832) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_v.base_layer, 140581770190016) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_v.lora_dropout, 140533113772528) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_v.lora_dropout['default_0'], 140533113774928) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[4].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[4].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[4].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[4].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[4].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[4].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[4].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[4].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].single_transformer_blocks[4].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.norm_k, 140581770189920) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[4].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[4].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.norm_k.weight, 140581772771952) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | 
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.norm_q, 140581770189776) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[4].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[4].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.norm_q.weight, 140581783349632) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[4].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[4].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.processor, 140581770189680) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[4].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm, 140581770189344) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[4].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.norm, 
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.silu, 140581770189392) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.linear, 140533114516000) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[4].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.linear.lora_A, 140533114519168) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.linear.lora_A['default_0'], 140533114436576) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
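
The norm guarded above is the adaptive-norm modulation whose source lines are quoted in the comments (normalization.py:169 and :171). A minimal sketch of that forward, reconstructed from those two lines; the 3-way chunk into shift/scale/gate is an assumption based on the shift_msa/scale_msa names and the gate consumed at transformer_flux.py:88:

    import torch.nn as nn

    class AdaLayerNormZeroSingleSketch(nn.Module):
        def __init__(self, dim):
            super().__init__()
            self.silu = nn.SiLU()
            self.linear = nn.Linear(dim, 3 * dim)  # produces shift, scale, gate
            self.norm = nn.LayerNorm(dim, elementwise_affine=False, eps=1e-6)

        def forward(self, x, emb):
            emb = self.linear(self.silu(emb))                    # normalization.py:169
            shift_msa, scale_msa, gate_msa = emb.chunk(3, dim=1)
            x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # normalization.py:171
            return x, gate_msa  # read as "norm_hidden_states, gate" at transformer_flux.py:88
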
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.linear.lora_A['default_0'].weight, 140537319295312) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.linear.lora_B, 140533114432256) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.linear.lora_B['default_0'], 140533114424816) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.linear.base_layer, 140581770189440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.linear.lora_dropout, 140533114517584) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.linear.lora_dropout['default_0'], 140533114512832) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
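
Every lora_A / lora_B / lora_dropout / scaling / use_dora guard above corresponds to an attribute read in the PEFT LoRA forward whose lines are quoted in the comments. An abridged sketch of that control flow (peft/tuners/lora/layer.py:557-568; the DoRA branch and dtype restoration are elided):

    def lora_linear_forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)        # layer.py:557
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():    # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]            # layer.py:562
            lora_B = self.lora_B[active_adapter]            # layer.py:563
            dropout = self.lora_dropout[active_adapter]     # layer.py:564
            scaling = self.scaling[active_adapter]          # layer.py:565
            x = x.to(lora_A.weight.dtype)                   # layer.py:566
            if not self.use_dora[active_adapter]:           # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

    # The LENGTH_CHECK guards come from the `merged` property on the same
    # layer: return bool(self.merged_adapters) (tuners_utils.py:506).
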
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[4].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[4].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[4].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[4].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[4].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[4].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[4].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[4].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
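
TENSOR_ALIASING here is an object-identity guard: the dump shows every PEFT layer's _active_adapter resolving to the very same object as transformer_blocks[0].norm1.linear._active_adapter, so Dynamo only needs to pin that one object. Minimal illustration of what the guard asserts (hypothetical stand-in variables):

    shared_active_adapter = ["default_0"]      # one object shared by all wrapped layers
    layer_a = {"_active_adapter": shared_active_adapter}
    layer_b = {"_active_adapter": shared_active_adapter}
    assert layer_a["_active_adapter"] is layer_b["_active_adapter"]  # identity, not equality
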
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].act_mlp, 140581770189584) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_mlp, 140533114434176) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[4].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_mlp.lora_A, 140533114434080) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_mlp.lora_A['default_0'], 140533113773824) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
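
The act_mlp / proj_mlp / proj_out guards map onto the single transformer block's data path, per the transformer_flux.py lines quoted above. A sketch of that path (the attention kwargs, the concatenation of the two streams, and the residual add are abridged assumptions):

    import torch

    def single_block_forward(self, hidden_states, temb):
        residual = hidden_states
        norm_hidden_states, gate = self.norm(hidden_states, emb=temb)        # transformer_flux.py:88
        mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states))  # transformer_flux.py:89
        attn_output = self.attn(hidden_states=norm_hidden_states)            # transformer_flux.py:91 (kwargs elided)
        hidden_states = torch.cat([attn_output, mlp_hidden_states], dim=2)   # assumed join of the two streams
        hidden_states = gate * self.proj_out(hidden_states)                  # transformer_flux.py:98
        return residual + hidden_states                                      # assumed residual add
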
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_mlp.lora_A['default_0'].weight, 140537319303312) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_mlp.lora_B, 140533114433456) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_mlp.lora_B['default_0'], 140533113776080) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_mlp.base_layer, 140581770189536) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_mlp.lora_dropout, 140533114422128) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_mlp.lora_dropout['default_0'], 140533114437152) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[4].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[4].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[4].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[4].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[4].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
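
Each LoRA-wrapped Linear contributes this same bundle of lora_A/lora_B/scaling/use_dora/hook guards, so the tree scales with the number of wrapped modules. If guard volume or recompiles become a problem, one plausible mitigation (assuming the standard diffusers/PEFT LoRA APIs and a fixed adapter) is to fold the adapter into the base weights before compiling, so Dynamo only guards plain Linear layers:

    import torch

    # Hypothetical setup: `pipe` is a FluxPipeline with a LoRA already loaded.
    pipe.fuse_lora()                  # fold W <- W + scaling * (B @ A) into the base weights
    pipe.unload_lora_weights()        # drop the adapter wrappers entirely
    pipe.transformer = torch.compile(pipe.transformer)
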
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[4].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[4].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[4].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_out, 140533113775888) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[4].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_out.lora_A, 140533113775840) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_out.lora_A['default_0'], 140533113777712) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
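
For reference, a dump like this one can be produced (assuming PyTorch 2.x artifact logging) either through the TORCH_LOGS environment variable or programmatically; the script name below is a hypothetical placeholder:

    # shell: TORCH_LOGS="guards,recompiles" python run_flux_lora.py
    import torch._logging

    torch._logging.set_logs(guards=True, recompiles=True)
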
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_out.lora_A['default_0'].weight, 140537319306192) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_out.lora_B, 140533113775696) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_out.lora_B['default_0'], 140533113775168) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_out.base_layer, 140581770189632) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_out.lora_dropout, 140533113775936) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
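
The EQUALS_MATCH on scaling['default_0'] == 1.0 bakes the LoRA scale into the compiled graph. In PEFT that scale is lora_alpha / r (or lora_alpha / sqrt(r) with rank-stabilized LoRA), so 1.0 indicates an adapter whose lora_alpha equals its rank; a sketch with hypothetical numbers:

    import math

    def lora_scale(lora_alpha, r, use_rslora=False):
        # PEFT's scale computation, per the LoraLayer scaling dict seen above.
        return lora_alpha / math.sqrt(r) if use_rslora else lora_alpha / r

    assert lora_scale(16, 16) == 1.0   # e.g. alpha == rank reproduces the guarded value
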
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_out.lora_dropout['default_0'], 140533113775024) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[4].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[4].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[4].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[4].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[4].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[4].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[4].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[4].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
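
The recurring "DICT_CONTAINS: not ___dict_contains('forward', ...)" guards assert that no module carries an instance-level forward override, since nn.Module._call_impl (module.py:1556, quoted above) would dispatch to it. Monkey-patching forward on any of these instances flips the check and forces a recompile; minimal illustration with a throwaway module:

    import types
    import torch.nn as nn

    m = nn.Linear(4, 4)
    assert "forward" not in m.__dict__        # the state the guard expects

    m.forward = types.MethodType(lambda self, x: x, m)  # instance-level override
    assert "forward" in m.__dict__            # guard now fails -> recompile on next call
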
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5], accessed_by=GetItemGuardAccessor(5)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5], 140581770189296) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[5].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
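
A guard subtree like the one above is emitted for every block the enumerate loop at transformer_flux.py:509 touches, so each single transformer block adds its own ID_MATCH tree; that is what makes this dump so large. The [0/1] tag on the records marks what appears to be the second compilation (first recompile) of frame 0, as opposed to the [0/0] dump earlier in the log. A dump of this shape can be reproduced on any module (a toy model here, not the Flux transformer):

    import torch

    # Equivalent to running with TORCH_LOGS="guards,recompiles" in the environment.
    torch._logging.set_logs(guards=True, recompiles=True)

    model = torch.nn.Sequential(torch.nn.Linear(8, 8), torch.nn.Linear(8, 8))
    compiled = torch.compile(model)
    compiled(torch.randn(2, 8))  # prints a TREE_GUARD_MANAGER with per-submodule ID_MATCH guards
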
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn, 140581770190496) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[5].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_k, 140533114308832) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[5].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_k.lora_A, 140533113915760) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_k.lora_A['default_0'], 140533113928816) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_k.lora_A['default_0'].weight, 140537319010480) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_k.lora_B, 140533113923680) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_k.lora_B['default_0'], 140533113924592) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
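
ID_MATCH guards such as the ones above compile down to ___check_obj_id, i.e. a CPython id() comparison: the cached graph is only reused while every guarded submodule is literally the same object. Swapping a module for an equivalent one (for example when adapter layers are re-created on reload) changes the id and forces a recompile. A minimal sketch of that behaviour:

    import torch

    torch._logging.set_logs(recompiles=True)

    model = torch.nn.Sequential(torch.nn.Linear(4, 4))
    compiled = torch.compile(model)
    compiled(torch.randn(1, 4))       # first compile; guards record id(model[0])
    model[0] = torch.nn.Linear(4, 4)  # same shape and dtype, but a new object
    compiled(torch.randn(1, 4))       # ID_MATCH fails -> Dynamo recompiles; results stay correct
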
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_k.base_layer, 140581770190640) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_k.lora_dropout, 140533113929392) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_k.lora_dropout['default_0'], 140533113924256) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[5].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[5].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[5].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[5].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[5].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[5].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[5].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[5].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[5].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
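
Two guards in the to_k subtree are what make LoRA scaling static in the compiled graph: DICT_LENGTH pins the scaling dict to exactly one adapter, and EQUALS_MATCH pins scaling['default_0'] to 1.0 (in PEFT this value is typically lora_alpha / r). Changing the effective adapter weight, or loading a second adapter, therefore invalidates the cache entry and triggers a recompile. A minimal sketch of that effect (the direct dict mutation stands in for what adapter-weight APIs do under the hood):

    import torch

    torch._logging.set_logs(recompiles=True)

    class Scaled(torch.nn.Module):
        def __init__(self):
            super().__init__()
            self.scaling = {"default_0": 1.0}  # guarded by EQUALS_MATCH, as in the dump
            self.lin = torch.nn.Linear(8, 8)

        def forward(self, x):
            return self.lin(x) * self.scaling["default_0"]

    m = Scaled()
    compiled = torch.compile(m)
    compiled(torch.randn(2, 8))   # compile #1, guard: scaling['default_0'] == 1.0
    m.scaling["default_0"] = 0.5  # new scale -> EQUALS_MATCH fails
    compiled(torch.randn(2, 8))   # recompile, reported by the recompiles log artifact
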
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_q, 140533115182608) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[5].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_q.lora_A, 140533115186496) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_q.lora_A['default_0'], 140533114313248) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_q.lora_A['default_0'].weight, 140537319150896) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_q.lora_B, 140533115175744) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_q.lora_B['default_0'], 140533114321504) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_q.base_layer, 140581770190736) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_q.lora_dropout, 140533115189760) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_q.lora_dropout['default_0'], 140533115189952) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[5].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[5].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[5].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[5].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[5].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[5].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[5].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[5].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[5].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
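
The TENSOR_ALIASING entries (the label Dynamo uses for its aliasing guards, even though _active_adapter is a plain Python container here rather than a tensor) record that every LoRA layer in this trace returns the very same _active_adapter object: one identity relation is checked instead of re-validating the value per layer. A minimal sketch of the aliasing being asserted:

    import torch

    shared_active = ["default_0"]  # one container shared by every LoRA layer

    class LoraStub(torch.nn.Module):
        def __init__(self):
            super().__init__()
            self._active_adapter = shared_active  # aliased attribute, as in the dump

        def forward(self, x):
            return x * len(self._active_adapter)

    a, b = LoraStub(), LoraStub()
    assert a._active_adapter is b._active_adapter  # the relation the guard encodes

    compiled = torch.compile(lambda x: b(a(x)))
    compiled(torch.randn(4))
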
accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_v, 140533114120912) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[5].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_v.lora_A, 140533114122112) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_v.lora_A['default_0'], 140533114552224) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_v.lora_A['default_0'].weight, 140537319003840) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_v.lora_B, 140533114123360) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_v.lora_B['default_0'], 140533114539408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_v.base_layer, 140581770190784) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_v.lora_dropout, 140533114119088) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.lora_dropout.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_v.lora_dropout['default_0'], 140533114124224) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[5].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[5].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[5].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | 
| | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[5].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[5].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[5].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[5].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[5].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.norm_k, 140581770190688) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[5].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[5].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager:
source=L['self'].single_transformer_blocks[5].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.norm_k.weight, 140581772502624) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.norm_q, 140581770190544) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[5].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: 
L['self'].single_transformer_blocks[5].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.norm_q.weight, 140581783352032) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[5].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[5].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.processor, 140581770190448) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | 
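The norm_k/norm_q subtrees just above pin an RMS-style norm: EQUALS_MATCH fixes eps == 1e-06 and ID_MATCH fixes the weight object, matching the normalization.py:428/430 lines quoted in the comments. A sketch consistent with those two lines; the variance reduction axis and the element-wise weight application are assumptions:

import torch

def rms_norm_forward(self, hidden_states):
    variance = hidden_states.pow(2).mean(-1, keepdim=True)            # assumed reduction
    hidden_states = hidden_states * torch.rsqrt(variance + self.eps)  # normalization.py:428, eps guarded == 1e-06
    if self.weight is not None:                                       # normalization.py:430, weight ID_MATCHed
        hidden_states = hidden_states * self.weight                   # assumed application
    return hidden_states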
| | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm, 140581770190112) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[5].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.training, 140591004393440) # norm_hidden_states, gate = 
self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.norm, 140581770190256) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.silu, 140581770190160) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.linear, 140533113771136) # emb = self.linear(self.silu(emb)) # 
diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[5].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.linear.lora_A, 140533113772192) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.linear.lora_A['default_0'], 140533115692144) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[5].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.linear.lora_A['default_0'].weight, 140537319159856) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.linear.lora_B, 140533113776464) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.linear.lora_B['default_0'], 140533115696560) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 
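The single_transformer_blocks[5].norm subtree (silu -> linear -> norm, per the normalization.py:169/171 comments) is the adaptive-norm path that transformer_flux.py:88 invokes as norm_hidden_states, gate = self.norm(hidden_states, emb=temb). A sketch consistent with those quoted lines; the three-way chunk is an assumption inferred from the shift/scale/gate names:

def ada_norm_zero_single_forward(self, x, emb):
    emb = self.linear(self.silu(emb))                                 # normalization.py:169
    shift_msa, scale_msa, gate_msa = emb.chunk(3, dim=1)              # assumed 3-way split
    x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # normalization.py:171
    return x, gate_msa

The linear here is itself LoRA-wrapped (hence the lora_A/lora_B guards under norm.linear above and below), so even the modulation projection carries the full adapter guard set.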
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.linear.base_layer, 140581770190208) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.linear.lora_dropout, 140533113774208) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.linear.lora_dropout.training, 
140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.linear.lora_dropout['default_0'], 140533113773200) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[5].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[5].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[5].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[5].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 
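At call time each entry above compiles down to a cheap Python check against the live module tree, and the whole tree must pass before the cached graph is reused. An illustrative (not verbatim) rendering of a few of the norm.linear guards, with Dynamo's ___check_obj_id/___check_type_id helpers treated as id()/type() comparisons:

def norm_linear_guards_ok(L):
    m = L['self'].single_transformer_blocks[5].norm.linear
    return (
        id(m.lora_A['default_0']) == 140533115692144  # ID_MATCH: object identity
        and type(m.scaling) is dict                   # TYPE_MATCH on the scaling dict
        and len(m.scaling) == 1                       # DICT_LENGTH: exactly one adapter
        and m.scaling['default_0'] == 1.0             # EQUALS_MATCH
        and not m.merged_adapters                     # LENGTH_CHECK: merged() stays falsy
        and m._disable_adapters is False              # ID_MATCH against a bool singleton (assumed False here)
    )

Any failure (a swapped adapter module, a second adapter key, a changed scale) sends execution back to recompilation rather than silently reusing a stale graph.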
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[5].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[5].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[5].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[5].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].act_mlp, 140581770190352) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_mlp, 140533114395696) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) #
diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[5].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_mlp.lora_A, 140533115175408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_mlp.lora_A['default_0'], 140533115176608) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[5].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_mlp.lora_A['default_0'].weight, 140537319157856) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_mlp.lora_B, 140533115176656) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_mlp.lora_B['default_0'], 140533115185104) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[5].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_mlp.base_layer, 140581770190304) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_mlp.lora_dropout, 140533115175552) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- 
GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_mlp.lora_dropout['default_0'], 140533115175216) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[5].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[5].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[5].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[5].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[5].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | 
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[5].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[5].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[5].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_out, 140533115182512) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[5].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_out.lora_A, 140533115184912) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496
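The merged/_disable_adapters/_active_adapter guards recur for every adapted layer and all point at three small PEFT accessors (peft/tuners/tuners_utils.py:506/511/516, as quoted in the log). A sketch of those accessors; the class context is assumed:

class BaseTunerLayerSketch:
    @property
    def merged(self) -> bool:
        return bool(self.merged_adapters)   # tuners_utils.py:506 -> TYPE_MATCH + LENGTH_CHECK

    @property
    def disable_adapters(self) -> bool:
        return self._disable_adapters       # tuners_utils.py:511 -> ID_MATCH

    @property
    def active_adapter(self):
        return self._active_adapter         # tuners_utils.py:516 -> TENSOR_ALIASING

Despite its name, TENSOR_ALIASING here asserts plain object identity: each adapted layer's _active_adapter must remain the very same object as transformer_blocks[0].norm1.linear._active_adapter, i.e. one shared active-adapter record across the whole model.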
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_out.lora_A['default_0'], 140533115177184) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_out.lora_A['default_0'].weight, 140537319157616) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_out.lora_B, 140533115181072) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_out.lora_B['default_0'], 140533115177328) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_out.base_layer, 140581770190400) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_out.lora_dropout, 140533115182032) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_out.lora_dropout['default_0'], 140533115180784) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[5].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[5].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[5].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[5].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[5].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[5].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[5].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 
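[annotation] The bulk of these guards exists because every Linear in the model is wrapped by a peft LoRA layer, and Dynamo has to pin down each branch that wrapper's forward can take. The guard comments cite peft/tuners/lora/layer.py lines 557-568; paraphrased as a standalone sketch (details vary across peft versions, and the dtype bookkeeping is omitted), the guarded path is roughly:

    def lora_linear_forward(layer, x):
        # Assumes what the guards assert: disable_adapters is False and
        # merged_adapters is empty, so the un-merged adapter branch runs.
        result = layer.base_layer(x)                   # layer.py:557
        for name in layer.active_adapters:             # backed by the shared _active_adapter
            if name not in layer.lora_A.keys():        # layer.py:560
                continue
            lora_A = layer.lora_A[name]                # layer.py:562
            lora_B = layer.lora_B[name]                # layer.py:563
            dropout = layer.lora_dropout[name]         # layer.py:564
            scaling = layer.scaling[name]              # layer.py:565, guarded == 1.0
            x = x.to(lora_A.weight.dtype)              # layer.py:566
            if not layer.use_dora[name]:               # layer.py:568, guarded False
                result = result + lora_B(lora_A(dropout(x))) * scaling
            # else: DoRA path, unreachable while the use_dora guard holds
        return result

Each dict lookup, .training flag, and scalar read on that path becomes its own leaf guard, which is why a single wrapped projection such as proj_out contributes well over a dozen checks.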
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[5].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[5].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6], accessed_by=GetItemGuardAccessor(6) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6], 140581770190064) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[6].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | 
| | | | +- GuardManager: source=L['self'].single_transformer_blocks[6]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn, 140581770191264) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[6].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_k, 140533114597488) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[6].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_k.training, 140591004393408) 
# key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_k.lora_A, 140533112544496) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_k.lora_A['default_0'], 140533112539408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[6].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_k.lora_A['default_0'].weight, 140537318886512) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_k.lora_B, 140533112548624) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_k.lora_B['default_0'], 140533112543152) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_k.base_layer, 140581770191408) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_k.lora_dropout, 140533112540320) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_k.lora_dropout['default_0'], 140533114594176) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.lora_dropout['default_0'].training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[6].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[6].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[6].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[6].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[6].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[6].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[6].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[6].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[6].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[6].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_q, 140533114590864) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 
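[annotation] Within each attention module the same guard block is stamped out once per projection, following the three calls the Flux attention processor makes (quoted in the guard comments from diffusers/src/diffusers/models/attention_processor.py):

    query = attn.to_q(hidden_states)   # attention_processor.py:1716
    key = attn.to_k(hidden_states)     # attention_processor.py:1717
    value = attn.to_v(hidden_states)   # attention_processor.py:1718

Since to_q, to_k, and to_v are each LoRA wrappers, the full adapter guard set appears three times per attention layer, plus once each for proj_mlp and proj_out in the single transformer blocks.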
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[6].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_q.lora_A, 140533114590720) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_q.lora_A['default_0'], 140533114586784) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.lora_A['default_0'].__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_q.lora_A['default_0'].weight, 140537319003600) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_q.lora_B, 140533114587216) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_q.lora_B['default_0'], 140533114586880) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[6].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_q.base_layer, 140581770191504) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_q.lora_dropout, 140533114590336) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 
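[annotation] A note on reading the bracketed prefixes: [0/1] is frame-id/compile-id, so this entire guard tree belongs to the first recompilation of frame 0 (the [0/0] tree appears earlier in this log), meaning at least one guard from the original compile failed. To see which guard triggered the recompile, the documented "recompiles" log artifact can be enabled alongside "guards"; the script name below is a stand-in for whatever produced this dump:

    # hypothetical entry point, substitute your own script
    TORCH_LOGS="guards,recompiles" python run_flux_lora.py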
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_q.lora_dropout['default_0'], 140533114590768) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[6].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[6].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[6].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[6].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[6].attn.to_q.use_dora) == 1 # if not 
self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[6].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[6].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[6].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_v, 140533112540272) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[6].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_v.lora_A, 140533112549776) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_v.lora_A['default_0'], 140533112551888) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_v.lora_A['default_0'].weight, 140537318883792) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_v.lora_B, 140533112552656) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_v.lora_B['default_0'], 140533112551840) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_v.base_layer, 140581770191552) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_v.lora_dropout, 140533112538688) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_v.lora_dropout['default_0'], 140533112541232) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[6].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[6].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[6].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[6].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[6].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[6].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[6].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[6].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
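Every LoRA-wrapped linear layer in the model contributes an essentially identical cluster of guards, because Dynamo inlines peft's LoRA forward and pins down each attribute it reads. The following is a minimal sketch of that code path, paraphrasing the peft/tuners/lora/layer.py lines quoted in the guard comments above; the class and field names mirror peft, but this is an illustration under stated assumptions, not the library source:

    import torch
    import torch.nn as nn

    class LoraLinearSketch(nn.Module):
        # Sketch of the non-merged, non-DoRA path that the guards above protect.
        def __init__(self, base: nn.Linear, r: int = 16, lora_alpha: int = 16):
            super().__init__()
            self.base_layer = base
            self.lora_A = nn.ModuleDict({"default_0": nn.Linear(base.in_features, r, bias=False)})
            self.lora_B = nn.ModuleDict({"default_0": nn.Linear(r, base.out_features, bias=False)})
            self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})
            self.scaling = {"default_0": lora_alpha / r}  # EQUALS_MATCH pins this to 1.0
            self.use_dora = {"default_0": False}          # TYPE_MATCH + DICT_LENGTH + ID_MATCH
            self.merged_adapters = []                     # LENGTH_CHECK: list must stay empty
            self._disable_adapters = False                # ID_MATCH on the bool singleton
            self._active_adapter = ["default_0"]          # shared object -> TENSOR_ALIASING

        def forward(self, x: torch.Tensor) -> torch.Tensor:
            result = self.base_layer(x)                        # layer.py:557
            for active_adapter in self._active_adapter:
                if active_adapter not in self.lora_A.keys():   # layer.py:560
                    continue
                lora_A = self.lora_A[active_adapter]           # layer.py:562
                lora_B = self.lora_B[active_adapter]           # layer.py:563
                dropout = self.lora_dropout[active_adapter]    # layer.py:564
                scaling = self.scaling[active_adapter]         # layer.py:565
                x = x.to(lora_A.weight.dtype)                  # layer.py:566
                if not self.use_dora[active_adapter]:          # layer.py:568
                    result = result + lora_B(lora_A(dropout(x))) * scaling
            return result

Each attribute access in that forward corresponds to one GuardManager node above, which is why a single LoRA-wrapped projection such as attn.to_v accounts for roughly thirty guards on its own.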
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.norm_k, 140581770191456) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[6].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[6].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.norm_k.weight, 140581773356832) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.norm_q, 140581770191312) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[6].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[6].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.norm_q.weight, 140581773349152) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[6].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
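The norm_q/norm_k guards pin the eps constant and the affine weight of diffusers' RMSNorm, and the EQUALS_MATCH on attn.heads == 24 bakes the head count into the compiled graph as a constant. Roughly, the forward being guarded looks like the sketch below, which follows the normalization.py lines quoted in the guard comments but is not the library source:

    import torch
    import torch.nn as nn

    class RMSNormSketch(nn.Module):
        # eps is guarded by EQUALS_MATCH (== 1e-06); weight by ID_MATCH, which
        # covers both the None case and a specific nn.Parameter instance.
        def __init__(self, dim: int, eps: float = 1e-6, elementwise_affine: bool = True):
            super().__init__()
            self.eps = eps
            self.weight = nn.Parameter(torch.ones(dim)) if elementwise_affine else None

        def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
            variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)
            hidden_states = hidden_states * torch.rsqrt(variance + self.eps)  # normalization.py:428
            if self.weight is not None:                                       # normalization.py:430
                hidden_states = hidden_states * self.weight
            return hidden_states

Because eps and heads are captured as literals, models built with a different eps or head count cannot reuse this compiled graph; they fail these EQUALS_MATCH guards and compile a fresh one.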
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[6].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.processor, 140581770191216) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.forward, accessed_by=FuncDefaultsGuardAccessor
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
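The leaf guard kinds that recur throughout this tree are cheap identity and equality predicates, evaluated natively by the guard managers. As an illustration of their semantics only, not the actual implementation, they behave approximately like:

    # Rough Python equivalents of the leaf guards above (illustrative only;
    # the real checks run inside the compiled TREE_GUARD_MANAGER):

    def id_match(obj, expected_id: int) -> bool:         # ID_MATCH / ___check_obj_id
        return id(obj) == expected_id                    # same object identity, not ==

    def type_match(obj, expected_type_id: int) -> bool:  # TYPE_MATCH / ___check_type_id
        return id(type(obj)) == expected_type_id

    def equals_match(value, expected) -> bool:           # EQUALS_MATCH
        return value == expected                         # e.g. attn.heads == 24

    def dict_contains(d: dict, key) -> bool:             # DICT_CONTAINS (negated above)
        return key in d                                  # 'forward' must not be shadowed

    def tensor_aliasing(a, b) -> bool:                   # TENSOR_ALIASING
        return a is b                                    # one shared _active_adapter object

Note that the ID_MATCH guards on every module's training attribute compare against the identity of the True/False singletons, so flipping a module between train() and eval() after compilation fails the guard and forces a recompile.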
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm, 140581770190880) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[6].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.norm, 140581770191024) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.silu, 140581770190928) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.linear, 140533114541520) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[6].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.linear.lora_A, 140533114551120) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.linear.lora_A['default_0'], 140533114544688) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.linear.lora_A['default_0'].weight, 140537319011280) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.linear.lora_B, 140533114549824) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.linear.lora_B['default_0'], 140533114541376) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.linear.base_layer, 140581770190976) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.linear.lora_dropout, 140533114545360) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.linear.lora_dropout['default_0'], 140533114539120) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[6].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[6].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[6].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[6].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[6].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[6].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[6].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[6].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
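The EQUALS_MATCH on scaling['default_0'] == 1.0 is worth noting: in peft the per-adapter scale is lora_alpha / r (or lora_alpha / sqrt(r) with rank-stabilized LoRA), so a value of 1.0 here suggests this adapter was loaded with lora_alpha equal to r. A small sketch of that relationship, using peft's config names for illustration:

    import math

    def lora_scaling(lora_alpha: float, r: int, use_rslora: bool = False) -> float:
        # Mirrors how peft derives the per-adapter scale stored in self.scaling.
        return lora_alpha / math.sqrt(r) if use_rslora else lora_alpha / r

    assert lora_scaling(16, 16) == 1.0  # consistent with the guard above

Because the scale is guarded as a literal constant, anything that mutates it after compilation, such as scale_lora_layers() with a non-default lora_scale, fails this guard and triggers a recompile on the next call.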
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].act_mlp, 140581770191120) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_mlp, 140533113669664) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[6].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_mlp.lora_A, 140533114089296) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_mlp.lora_A['default_0'], 140533114078016) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_mlp.lora_A['default_0'].weight, 140537319013920) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_mlp.lora_B, 140533114077920) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_mlp.lora_B['default_0'], 140533113871072) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_mlp.base_layer, 140581770191072) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_mlp.lora_dropout, 140533114081424) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_mlp.lora_dropout['default_0'], 140533114093520) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[6].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[6].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[6].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager:
source=L['self'].single_transformer_blocks[6].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[6].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[6].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[6].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[6].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[6].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[6].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[6].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_out, 140533114587504) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[6].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_out.lora_A, 140533114587072) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_out.lora_A['default_0'], 140533114587024) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_out.lora_A['default_0'].weight, 140537319008400) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_out.lora_B, 140533114588368) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_out.lora_B['default_0'], 140533114588512) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_out.base_layer, 140581770191168) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_out.lora_dropout, 140533114586976) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_out.lora_dropout['default_0'], 140533114586928) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[6].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[6].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # 
peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[6].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[6].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[6].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[6].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[6].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[6].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[6].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7], accessed_by=GetItemGuardAccessor(7) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7], 140581770190832) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', 
L['self'].single_transformer_blocks[7].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn, 140581770192032) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[7].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_k, 140533116687440) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[7].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_k.lora_A, 140533116690224) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_k.lora_A['default_0'], 140533116698432) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[7].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_k.lora_A['default_0'].weight, 140537318745776) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_k.lora_B, 140533116686720) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_k.lora_B['default_0'], 140533116689072) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_k.base_layer, 140581770192176) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_k.lora_dropout, 140533116697664) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[7].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_k.lora_dropout['default_0'], 140533116697712) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[7].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[7].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[7].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[7].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[7].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[7].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[7].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[7].attn.to_k._active_adapter # return 
self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[7].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_q, 140533112543440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[7].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_q.lora_A, 140533112549056) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_q.lora_A['default_0'], 140533116682688) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_q.lora_A['default_0'].weight, 140537318897552) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_q.lora_B, 140533112543296) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_q.lora_B['default_0'], 140533116685280) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_q.base_layer, 140581770192272) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_q.lora_dropout, 
140533112542000) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_q.lora_dropout['default_0'], 140533112540992) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[7].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[7].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[7].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[7].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[7].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[7].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[7].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 
in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[7].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_v, 140533116695120) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[7].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager:
source=L['self'].single_transformer_blocks[7].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_v.lora_A, 140533116687248) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_v.lora_A['default_0'], 140533113933728) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_v.lora_A['default_0'].weight, 140537318750816) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_v.lora_B, 140533116697616) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_v.lora_B['default_0'], 140533113932240) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_v.base_layer, 140581770192320) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.base_layer.__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_v.lora_dropout, 140533116692144) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_v.lora_dropout['default_0'], 140533116696992) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[7].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[7].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[7].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[7].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[7].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[7].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[7].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[7].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.norm_k, 140581770192224) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward',
L['self'].single_transformer_blocks[7].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[7].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.norm_k.weight, 140581765865344) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[7].attn.norm_q, 140581770192080) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[7].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[7].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.norm_q.weight, 140581783351872) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[7].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[7].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.processor, 140581770191984) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if 
encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm, 140581770191648) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[7].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.norm, 140581770191792) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- 
GuardManager: source=L['self'].single_transformer_blocks[7].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.silu, 140581770191696) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.linear, 140533112550832) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[7].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[7].norm.linear.lora_A, 140533112551648) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.linear.lora_A['default_0'], 140533112544352) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.linear.lora_A['default_0'].weight, 140537318888992) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) 
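For orientation, the source comments Dynamo attaches to the guards above all point into peft/tuners/lora/layer.py:557-568, the LoRA Linear forward that was traced. Below is a minimal sketch of that code path, reconstructed only from the source lines quoted in this log and the guarded values (scaling == 1.0, use_dora False, merged_adapters empty, adapter key 'default_0'). It is an illustration, not the verbatim peft implementation; the class name, rank, and tensor shapes in the usage line are made up for the example.

import torch
import torch.nn as nn

class LoraLinearSketch(nn.Module):
    # Toy stand-in for peft's lora.Linear, shaped after the guard tree above.
    def __init__(self, base_layer: nn.Linear, r: int = 16):
        super().__init__()
        self.base_layer = base_layer                                     # ID_MATCH guard (layer.py:557)
        self.lora_A = nn.ModuleDict({"default_0": nn.Linear(base_layer.in_features, r, bias=False)})
        self.lora_B = nn.ModuleDict({"default_0": nn.Linear(r, base_layer.out_features, bias=False)})
        self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})  # guarded per adapter (layer.py:564)
        self.scaling = {"default_0": 1.0}                                # EQUALS_MATCH == 1.0 (layer.py:565)
        self.use_dora = {"default_0": False}                             # ID_MATCH on False (layer.py:568)
        self.merged_adapters = []                                        # LENGTH_CHECK: empty (tuners_utils.py:506)
        self._active_adapter = ["default_0"]                             # one list shared across layers, hence the TENSOR_ALIASING guards

    def forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)                     # layer.py:557
        for active_adapter in self._active_adapter:
            if active_adapter not in self.lora_A.keys():                 # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]                         # layer.py:562
            lora_B = self.lora_B[active_adapter]                         # layer.py:563
            dropout = self.lora_dropout[active_adapter]                  # layer.py:564
            scaling = self.scaling[active_adapter]                       # layer.py:565
            x = x.to(lora_A.weight.dtype)                                # layer.py:566
            if not self.use_dora[active_adapter]:                        # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

layer = LoraLinearSketch(nn.Linear(3072, 3072))
out = layer(torch.randn(1, 4096, 3072))  # hypothetical Flux-like shapes, for illustration only

Every attribute read on that path becomes a guard node in the tree, repeated for each of the to_q/to_k/to_v/norm.linear wrappers, which is why a single LoRA-wrapped block contributes dozens of guards. Merging the adapter into the base weights before compiling (for example peft's merge_and_unload or diffusers' fuse_lora) removes these wrapper layers and should shrink the guard tree accordingly.
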
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.linear.lora_B, 140533112550592)  # lora_B = self.lora_B[active_adapter]  # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.linear.lora_B.training, 140591004393408)  # lora_B = self.lora_B[active_adapter]  # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.linear.lora_B['default_0'], 140533112546128)  # lora_B = self.lora_B[active_adapter]  # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.linear.lora_B['default_0'].training, 140591004393408)  # lora_B = self.lora_B[active_adapter]  # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.linear.base_layer, 140581770191744)  # result = self.base_layer(x, *args, **kwargs)  # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.linear.base_layer.training, 140591004393440)  # result = self.base_layer(x, *args, **kwargs)  # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.linear.lora_dropout, 140533112550304)  # dropout = self.lora_dropout[active_adapter]  # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.linear.lora_dropout.training, 140591004393408)  # dropout = self.lora_dropout[active_adapter]  # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.linear.lora_dropout['default_0'], 140533112550784)  # dropout = self.lora_dropout[active_adapter]  # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.linear.lora_dropout['default_0'].training, 140591004393408)  # dropout = self.lora_dropout[active_adapter]  # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[7].norm.linear.scaling, 140591004466944)  # scaling = self.scaling[active_adapter]  # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[7].norm.linear.scaling) == 1  # scaling = self.scaling[active_adapter]  # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[7].norm.linear.scaling['default_0'] == 1.0  # scaling = self.scaling[active_adapter]  # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[7].norm.linear.use_dora, 140591004466944)  # if not self.use_dora[active_adapter]:  # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[7].norm.linear.use_dora) == 1  # if not self.use_dora[active_adapter]:  # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.linear.use_dora['default_0'], 140591004393440)  # if not self.use_dora[active_adapter]:  # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[7].norm.linear.merged_adapters, 140591004458752)  # return bool(self.merged_adapters)  # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[7].norm.linear.merged_adapters  # return bool(self.merged_adapters)  # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.linear._disable_adapters, 140591004393440)  # return self._disable_adapters  # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[7].norm.linear._active_adapter  # return self._active_adapter  # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[7].norm.linear._active_adapter  # return self._active_adapter  # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
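[Annotation] The subtree above is the per-module fingerprint dynamo takes of one PEFT LoraLayer: every attribute its forward touches (the lora_A/lora_B ModuleDicts, the scaling and use_dora dicts, merged_adapters, _disable_adapters, _active_adapter) becomes an ID_MATCH, TYPE_MATCH, DICT_LENGTH, EQUALS_MATCH, or LENGTH_CHECK guard, keyed to the quoted peft/tuners/lora/layer.py lines. A minimal sketch (not the PEFT implementation; only the adapter name "default_0" is taken from this log, everything else is illustrative) that reproduces the same guard shape:

    import torch
    import torch.nn as nn

    class LoraLinearSketch(nn.Module):
        # Mirrors the attributes guarded above.
        def __init__(self, base: nn.Linear, r: int = 4):
            super().__init__()
            self.base_layer = base                 # -> ID_MATCH on base_layer
            self.lora_A = nn.ModuleDict({"default_0": nn.Linear(base.in_features, r, bias=False)})
            self.lora_B = nn.ModuleDict({"default_0": nn.Linear(r, base.out_features, bias=False)})
            self.scaling = {"default_0": 1.0}      # dict -> TYPE_MATCH + DICT_LENGTH + EQUALS_MATCH == 1.0
            self.use_dora = {"default_0": False}   # dict -> TYPE_MATCH + DICT_LENGTH + ID_MATCH on False
            self.merged_adapters = []              # bool(...) -> TYPE_MATCH + LENGTH_CHECK
            self._disable_adapters = False         # attribute read -> ID_MATCH on False
            self._active_adapter = ["default_0"]

        def forward(self, x):
            result = self.base_layer(x)
            for name in self._active_adapter:
                lora_A = self.lora_A[name]         # -> ID_MATCH on the ModuleDict and its entry
                lora_B = self.lora_B[name]
                x = x.to(lora_A.weight.dtype)      # -> ID_MATCH on the weight Parameter
                result = result + lora_B(lora_A(x)) * self.scaling[name]
            return result

    m = torch.compile(LoraLinearSketch(nn.Linear(8, 8)))
    m(torch.randn(2, 8))   # run under TORCH_LOGS="guards" to dump a tree shaped like the one above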
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].act_mlp, 140581770191888)  # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states))  # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].act_mlp.training, 140591004393440)  # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states))  # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_mlp, 140533112544208)  # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states))  # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[7].proj_mlp.__dict__)  # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward)  # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_mlp.training, 140591004393408)  # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states))  # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_mlp.lora_A, 140533112545024)  # if active_adapter not in self.lora_A.keys():  # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_mlp.lora_A.training, 140591004393408)  # if active_adapter not in self.lora_A.keys():  # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_mlp.lora_A['default_0'], 140533112540608)  # lora_A = self.lora_A[active_adapter]  # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_mlp.lora_A['default_0'].training, 140591004393408)  # lora_A = self.lora_A[active_adapter]  # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_mlp.lora_A['default_0'].weight, 140537318893792)  # x = x.to(lora_A.weight.dtype)  # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_mlp.lora_B, 140533112540800)  # lora_B = self.lora_B[active_adapter]  # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_mlp.lora_B.training, 140591004393408)  # lora_B = self.lora_B[active_adapter]  # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_mlp.lora_B['default_0'], 140533112540752)  # lora_B = self.lora_B[active_adapter]  # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_mlp.lora_B['default_0'].training, 140591004393408)  # lora_B = self.lora_B[active_adapter]  # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_mlp.base_layer, 140581770191840)  # result = self.base_layer(x, *args, **kwargs)  # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_mlp.base_layer.training, 140591004393440)  # result = self.base_layer(x, *args, **kwargs)  # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_mlp.lora_dropout, 140533112546080)  # dropout = self.lora_dropout[active_adapter]  # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_mlp.lora_dropout.training, 140591004393408)  # dropout = self.lora_dropout[active_adapter]  # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_mlp.lora_dropout['default_0'], 140533112544688)  # dropout = self.lora_dropout[active_adapter]  # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_mlp.lora_dropout['default_0'].training, 140591004393408)  # dropout = self.lora_dropout[active_adapter]  # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[7].proj_mlp.scaling, 140591004466944)  # scaling = self.scaling[active_adapter]  # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[7].proj_mlp.scaling) == 1  # scaling = self.scaling[active_adapter]  # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[7].proj_mlp.scaling['default_0'] == 1.0  # scaling = self.scaling[active_adapter]  # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[7].proj_mlp.use_dora, 140591004466944)  # if not self.use_dora[active_adapter]:  # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[7].proj_mlp.use_dora) == 1  # if not self.use_dora[active_adapter]:  # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_mlp.use_dora['default_0'], 140591004393440)  # if not self.use_dora[active_adapter]:  # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[7].proj_mlp.merged_adapters, 140591004458752)  # return bool(self.merged_adapters)  # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[7].proj_mlp.merged_adapters  # return bool(self.merged_adapters)  # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_mlp._disable_adapters, 140591004393440)  # return self._disable_adapters  # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[7].proj_mlp._active_adapter  # return self._active_adapter  # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[7].proj_mlp._active_adapter  # return self._active_adapter  # peft/tuners/tuners_utils.py:516 in active_adapter
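[Annotation] Note the TENSOR_ALIASING pair that closes each LoRA subtree: every adapted layer in this trace holds the very same _active_adapter object as transformer_blocks[0].norm1.linear, so dynamo only asserts identity with that first occurrence instead of re-checking the contents each time. A sketch of the invariant being encoded (the layer objects are stand-ins, not PEFT classes):

    from types import SimpleNamespace

    shared = ["default_0"]                              # one adapter list shared across layers
    layer_a = SimpleNamespace(_active_adapter=shared)
    layer_b = SimpleNamespace(_active_adapter=shared)
    assert layer_a._active_adapter is layer_b._active_adapter  # one "is" check replaces a re-scan

Dumps like this one can be requested with TORCH_LOGS="guards" on the command line, or in-process via torch._logging.set_logs(guards=True).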
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_out, 140533112543056)  # hidden_states = gate * self.proj_out(hidden_states)  # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[7].proj_out.__dict__)  # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward)  # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_out.training, 140591004393408)  # hidden_states = gate * self.proj_out(hidden_states)  # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_out.lora_A, 140533112542240)  # if active_adapter not in self.lora_A.keys():  # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_out.lora_A.training, 140591004393408)  # if active_adapter not in self.lora_A.keys():  # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_out.lora_A['default_0'], 140533112541856)  # lora_A = self.lora_A[active_adapter]  # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_out.lora_A['default_0'].training, 140591004393408)  # lora_A = self.lora_A[active_adapter]  # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_out.lora_A['default_0'].weight, 140537318892512)  # x = x.to(lora_A.weight.dtype)  # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_out.lora_B, 140533112541424)  # lora_B = self.lora_B[active_adapter]  # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_out.lora_B.training, 140591004393408)  # lora_B = self.lora_B[active_adapter]  # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_out.lora_B['default_0'], 140533112540560)  # lora_B = self.lora_B[active_adapter]  # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_out.lora_B['default_0'].training, 140591004393408)  # lora_B = self.lora_B[active_adapter]  # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_out.base_layer, 140581770191936)  # result = self.base_layer(x, *args, **kwargs)  # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_out.base_layer.training, 140591004393440)  # result = self.base_layer(x, *args, **kwargs)  # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_out.lora_dropout, 140533112542720)  # dropout = self.lora_dropout[active_adapter]  # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_out.lora_dropout.training, 140591004393408)  # dropout = self.lora_dropout[active_adapter]  # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_out.lora_dropout['default_0'], 140533112542576)  # dropout = self.lora_dropout[active_adapter]  # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_out.lora_dropout['default_0'].training, 140591004393408)  # dropout = self.lora_dropout[active_adapter]  # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[7].proj_out.scaling, 140591004466944)  # scaling = self.scaling[active_adapter]  # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[7].proj_out.scaling) == 1  # scaling = self.scaling[active_adapter]  # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[7].proj_out.scaling['default_0'] == 1.0  # scaling = self.scaling[active_adapter]  # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[7].proj_out.use_dora, 140591004466944)  # if not self.use_dora[active_adapter]:  # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[7].proj_out.use_dora) == 1  # if not self.use_dora[active_adapter]:  # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_out.use_dora['default_0'], 140591004393440)  # if not self.use_dora[active_adapter]:  # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[7].proj_out.merged_adapters, 140591004458752)  # return bool(self.merged_adapters)  # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[7].proj_out.merged_adapters  # return bool(self.merged_adapters)  # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_out._disable_adapters, 140591004393440)  # return self._disable_adapters  # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[7].proj_out._active_adapter  # return self._active_adapter  # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[7].proj_out._active_adapter  # return self._active_adapter  # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
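[Annotation] That closes single_transformer_blocks[7]; the same multi-dozen-guard block now repeats for block [8], which is why this dump runs to thousands of lines and why guard evaluation itself costs time at every dispatch. One way to collapse the PEFT-related guards entirely (a sketch, assuming a diffusers FluxPipeline bound to pipe with this LoRA already loaded) is to fold the adapter into the base weights before compiling, so none of the peft/tuners/lora/layer.py code paths guarded above are traced at all:

    import torch

    pipe.fuse_lora(lora_scale=1.0)      # fold lora_B @ lora_A * scaling into the base weights
    pipe.unload_lora_weights()          # drop the adapter modules and their guard surface
    pipe.transformer = torch.compile(pipe.transformer)

The trade-off is that the adapter can no longer be toggled or re-scaled per call without unfusing and recompiling.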
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8], accessed_by=GetItemGuardAccessor(8) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8], 140581770191600) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[8].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn, 140581770192800) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[8].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.training, 
140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_k, 140533112554976) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[8].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_k.lora_A, 140533112558624) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_k.lora_A.training, 140591004393408) 
# if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_k.lora_A['default_0'], 140533112557232) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_k.lora_A['default_0'].weight, 140537318560512) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_k.lora_B, 140533112556992) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_k.lora_B['default_0'], 140533112559776) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_k.base_layer, 140581770192944) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_k.lora_dropout, 140533112558816) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in 
forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_k.lora_dropout['default_0'], 140533112558720) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[8].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[8].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: 
L['self'].single_transformer_blocks[8].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[8].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[8].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[8].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[8].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
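[annotation] The EQUALS_MATCH opening this record pins the LoRA scale itself, not just object identities. A hypothetical illustration of where that 1.0 comes from, assuming peft's usual scaling rule (lora_alpha / r, or lora_alpha / sqrt(r) when rslora is enabled):

lora_alpha, r = 16, 16          # hypothetical config values, not from this log
scaling = lora_alpha / r
assert scaling == 1.0           # a checkpoint with a different alpha/r ratio
                                # would fail this guard and trigger a recompile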
[__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[8].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[8].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_q, 140533112561936) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[8].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.lora_A, 
accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_q.lora_A, 140533112561840) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_q.lora_A['default_0'], 140533112557520) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_q.lora_A['default_0'].weight, 140537318740096) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | 
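[annotation] By this point the full per-layer guard set for to_k has scrolled past and the identical pattern is starting for to_q. Every field it touches corresponds to one line of the peft forward quoted in the trailing comments (peft/tuners/lora/layer.py:557-568). Below is a minimal, self-contained stand-in for that forward path, assuming a plain non-DoRA, unmerged lora.Linear; it is a sketch, not the real peft class.

import torch
import torch.nn as nn

class LoraLinearSketch(nn.Module):
    def __init__(self, base_layer: nn.Linear, r: int = 16, lora_alpha: int = 16):
        super().__init__()
        self.base_layer = base_layer
        self.lora_A = nn.ModuleDict({"default_0": nn.Linear(base_layer.in_features, r, bias=False)})
        self.lora_B = nn.ModuleDict({"default_0": nn.Linear(r, base_layer.out_features, bias=False)})
        self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})
        self.scaling = {"default_0": lora_alpha / r}   # EQUALS_MATCH: == 1.0 when alpha == r
        self.use_dora = {"default_0": False}           # guarded via ID_MATCH on the bool singleton
        self.merged_adapters = []                      # LENGTH_CHECK: nothing merged
        self._active_adapter = ["default_0"]           # shared list -> TENSOR_ALIASING guards

    def forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)          # layer.py:557
        for active_adapter in self._active_adapter:
            if active_adapter not in self.lora_A.keys():      # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]              # layer.py:562
            lora_B = self.lora_B[active_adapter]              # layer.py:563
            dropout = self.lora_dropout[active_adapter]       # layer.py:564
            scaling = self.scaling[active_adapter]            # layer.py:565
            x = x.to(lora_A.weight.dtype)                     # layer.py:566
            if not self.use_dora[active_adapter]:             # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

layer = LoraLinearSketch(nn.Linear(64, 64))
out = layer(torch.randn(2, 10, 64))   # base path plus B(A(dropout(x))) * scaling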
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_q.lora_B, 140533112561072) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_q.lora_B['default_0'], 140533112560592) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_q.base_layer, 140581770193040) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_q.lora_dropout, 140533112556896) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_q.lora_dropout['default_0'], 140533112559872) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[8].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[8].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[8].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[8].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[8].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[8].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: 
___check_type_id(L['self'].single_transformer_blocks[8].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[8].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[8].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[8].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_v, 140533112559152) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[8].attn.to_v.__dict__) # forward_call = (self._slow_forward if 
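[annotation] The TENSOR_ALIASING pair (emitted twice per layer in this dump) asserts object identity between each layer's _active_adapter and the one on transformer_blocks[0].norm1.linear: as the guards show, every adapted module references one shared list, so that object only has to be validated once. Identity, not equality, is the point; illustration only:

shared = ["default_0"]
layer_a_active = shared
layer_b_active = shared

assert layer_a_active is layer_b_active       # what the aliasing guard asserts
shared[0] = "other_adapter"                   # mutating the shared object...
assert layer_b_active[0] == "other_adapter"   # ...is visible from every layer,
# which is why identity rather than equality is the meaningful check.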
torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_v.lora_A, 140533112558864) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_v.lora_A['default_0'], 140533112567984) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_v.lora_A['default_0'].weight, 140537318558992) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_v.lora_B, 140533112567552) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_v.lora_B['default_0'], 140533112568176) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_v.base_layer, 140581770193088) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_v.lora_dropout, 140533112559680) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_v.lora_dropout['default_0'], 140533112555936) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[8].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[8].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[8].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[8].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[8].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[8].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[8].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[8].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].single_transformer_blocks[8].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.norm_k, 140581770192992) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[8].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[8].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.norm_k.weight, 140581766103104) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | 
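[annotation] norm_k (and norm_q just below) are RMS norms guarded down to eps == 1e-06 and the presence of weight. A minimal sketch matching the two quoted lines (diffusers normalization.py:428 and :430); the real diffusers class carries extra dtype handling, so treat this as the core only.

import torch
import torch.nn as nn

class RMSNormSketch(nn.Module):
    def __init__(self, dim: int, eps: float = 1e-6, elementwise_affine: bool = True):
        super().__init__()
        self.eps = eps                                   # guarded: eps == 1e-06
        self.weight = nn.Parameter(torch.ones(dim)) if elementwise_affine else None

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + self.eps)   # :428
        if self.weight is not None:                                        # :430
            hidden_states = hidden_states.to(self.weight.dtype) * self.weight
        return hidden_states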
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.norm_q, 140581770192848) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[8].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[8].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.norm_q.weight, 140581773350752) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[8].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[8].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.processor, 140581770192752) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: 
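[annotation] attn.heads == 24 is guarded because the processor's reshape bakes the head count into the compiled graph (head_dim = inner_dim // attn.heads, attention_processor.py:1721). The shapes below are illustrative, not taken from this log:

import torch

batch_size, seq_len = 1, 4608        # assumed sizes for illustration
inner_dim = 3072                     # 24 heads * 128 head_dim
heads = 24                           # guarded: attn.heads == 24
head_dim = inner_dim // heads        # attention_processor.py:1721

query = torch.randn(batch_size, seq_len, inner_dim)
# (B, S, H*D) -> (B, H, S, D), the layout scaled_dot_product_attention expects
query = query.view(batch_size, -1, heads, head_dim).transpose(1, 2)
assert query.shape == (batch_size, heads, seq_len, head_dim)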
source=L['self'].single_transformer_blocks[8].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm, 140581770192416) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[8].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.norm, 
140581770192560) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.silu, 140581770192464) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.linear, 140533113933968) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[8].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
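
The `norm` guards quote adaLN-Zero-style modulation code at normalization.py:169/171. A condensed sketch of that modulation, reconstructed from the quoted lines (layer sizes assumed, not the exact diffusers class):

```python
# Condensed sketch of the modulation quoted at normalization.py:169/171
# (AdaLayerNormZeroSingle-style); layer sizes are assumed.
import torch.nn as nn

class AdaLNZeroSingleSketch(nn.Module):
    def __init__(self, dim: int):
        super().__init__()
        self.silu = nn.SiLU()
        self.linear = nn.Linear(dim, 3 * dim)  # -> shift, scale, gate
        self.norm = nn.LayerNorm(dim, elementwise_affine=False, eps=1e-6)

    def forward(self, x, emb):
        emb = self.linear(self.silu(emb))                                 # :169
        shift_msa, scale_msa, gate_msa = emb.chunk(3, dim=1)
        x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # :171
        return x, gate_msa
```
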
source=L['self'].single_transformer_blocks[8].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.linear.lora_A, 140533113943136) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.linear.lora_A['default_0'], 140533112556080) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.linear.lora_A['default_0'].weight, 140537318738816) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.linear.lora_B, 140533113939296) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.linear.lora_B['default_0'], 140533112556320) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B 
= self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.linear.base_layer, 140581770192512) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.linear.lora_dropout, 140533113932624) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.linear.lora_dropout['default_0'], 140533113943760) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
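
The peft source lines quoted across these guards (layer.py:557-568) walk the standard LoRA `Linear` forward, and the guards pin every object that path touches: the base layer, both adapter matrices, the dropout module, the scaling dict, and the `use_dora` flag. A simplified single-adapter sketch of that path, not the actual peft class:

```python
# Simplified single-adapter sketch of the peft LoRA Linear forward the
# guard comments quote (layer.py:557-568); not the actual peft class.
def lora_linear_forward(self, x):
    result = self.base_layer(x)                      # layer.py:557
    for active_adapter in self.active_adapters:
        if active_adapter not in self.lora_A.keys(): # layer.py:560
            continue
        lora_A = self.lora_A[active_adapter]         # layer.py:562
        lora_B = self.lora_B[active_adapter]         # layer.py:563
        dropout = self.lora_dropout[active_adapter]  # layer.py:564
        scaling = self.scaling[active_adapter]       # layer.py:565
        x = x.to(lora_A.weight.dtype)                # layer.py:566
        if not self.use_dora[active_adapter]:        # layer.py:568
            result = result + lora_B(lora_A(dropout(x))) * scaling
    return result
```
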
[__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[8].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[8].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[8].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[8].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[8].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.linear.use_dora['default_0'], 140591004393440) # if not 
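
The EQUALS_MATCH on `scaling['default_0'] == 1.0` specializes the graph on the adapter's scaling factor, which peft derives from `lora_alpha` and `r`; an adapter shipping a different ratio would fail this guard and trigger recompilation. Illustrative values only:

```python
# Illustrative values only: peft derives scaling from lora_alpha and r
# (lora_alpha / r, or lora_alpha / sqrt(r) with rslora), so the
# EQUALS_MATCH above implies alpha == r for this adapter.
lora_alpha, r = 16, 16
scaling = lora_alpha / r
assert scaling == 1.0  # the guarded value
```
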
self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[8].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[8].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[8].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[8].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] 
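
The TENSOR_ALIASING entries assert that `_active_adapter` on this layer is the very same Python object as on `transformer_blocks[0].norm1.linear`, letting one identity check stand in for per-layer content checks. A minimal sketch with hypothetical stand-in objects:

```python
# Minimal sketch with hypothetical stand-ins: the TENSOR_ALIASING guard
# asserts the two attributes are the *same* Python object, so a single
# identity check covers every layer sharing the list.
class LayerStub:
    pass

adapters = ["default_0"]
a, b = LayerStub(), LayerStub()
a._active_adapter = adapters
b._active_adapter = adapters
assert a._active_adapter is b._active_adapter  # what the guard evaluates
```
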
[0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].act_mlp, 140581770192656) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_mlp, 140533112567168) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[8].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- 
GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_mlp.lora_A, 140533112565248) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_mlp.lora_A['default_0'], 140533112568224) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | 
| +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_mlp.lora_A['default_0'].weight, 140537318749936) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_mlp.lora_B, 140533112567888) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_mlp.lora_B['default_0'], 140533112567744) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- 
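
The ID_MATCH on `lora_A['default_0'].weight` guards the parameter by object identity (it is read at layer.py:566 for its dtype). In-place updates keep the same object and pass the guard; rebinding the attribute creates a new object and fails it. A sketch with a stand-in `nn.Linear`:

```python
# Sketch with a stand-in nn.Linear: ID_MATCH compares id(obj), so in-place
# weight updates pass the guard while rebinding the Parameter fails it.
import torch

lin = torch.nn.Linear(4, 4)
wid = id(lin.weight)
with torch.no_grad():
    lin.weight.add_(1.0)              # same object: an ID_MATCH still passes
assert id(lin.weight) == wid
lin.weight = torch.nn.Parameter(lin.weight.detach().clone())
assert id(lin.weight) != wid          # rebound: an ID_MATCH would now fail
```
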
GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_mlp.base_layer, 140581770192608) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_mlp.lora_dropout, 140533112568080) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_mlp.lora_dropout['default_0'], 140533112556368) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | 
| +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[8].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[8].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[8].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[8].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[8].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- 
GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[8].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[8].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[8].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[8].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_out, 140533112567120) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:37:54.938000 140590996850496 
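
Assembled from the transformer_flux.py lines the guard comments quote (88, 89, 91, 98), the single-block forward looks roughly like the sketch below; this is a condensed reconstruction with assumed argument names, not the exact diffusers implementation:

```python
# Condensed reconstruction from the quoted transformer_flux.py lines
# (88, 89, 91, 98); argument names assumed, not the exact diffusers code.
import torch

def flux_single_block_forward(self, hidden_states, temb, image_rotary_emb=None):
    residual = hidden_states
    norm_hidden_states, gate = self.norm(hidden_states, emb=temb)         # :88
    mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states))   # :89
    attn_output = self.attn(hidden_states=norm_hidden_states,             # :91
                            image_rotary_emb=image_rotary_emb)
    hidden_states = torch.cat([attn_output, mlp_hidden_states], dim=2)
    hidden_states = gate.unsqueeze(1) * self.proj_out(hidden_states)      # :98
    return residual + hidden_states
```
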
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[8].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_out.lora_A, 140533112565680) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_out.lora_A['default_0'], 140533112560832) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] 
[0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_out.lora_A['default_0'].weight, 140537318747536) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_out.lora_B, 140533112566832) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_out.lora_B['default_0'], 140533112560016) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_out.base_layer, 140581770192704) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_out.lora_dropout, 140533112567504) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] 
[0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_out.lora_dropout['default_0'], 140533112564480) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[8].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[8].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[8].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[8].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[8].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[8].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[8].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[8].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[8].proj_out._active_adapter # return self._active_adapter # 
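
The LENGTH_CHECK/ID_MATCH pair quoted from tuners_utils.py:506/511 corresponds to two small peft properties that feed trace-time branching (merged layers take the unmerge path; disabled adapters skip the LoRA delta). Sketched here as the guard comments quote them:

```python
# The two peft properties quoted at tuners_utils.py:506/511, sketched as
# they appear in the guard comments; both feed trace-time branching.
class TunerLayerSketch:
    merged_adapters: list
    _disable_adapters: bool

    @property
    def merged(self) -> bool:
        return bool(self.merged_adapters)   # tuners_utils.py:506

    @property
    def disable_adapters(self) -> bool:
        return self._disable_adapters       # tuners_utils.py:511
```
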
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9], accessed_by=GetItemGuardAccessor(9)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9], 140581770192368) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[9].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn, 140581770193568) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[9].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_k, 140533112515088) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[9].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_k.lora_A, 140533112513264) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
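Every leaf in this tree is one of a handful of guard primitives. A rough Python rendering of their semantics follows; this is illustrative only, inferred from the guard names, since the real checks live inside Dynamo's GuardManager:

```python
# Illustrative Python equivalents of the guard primitives in this dump.
# Assumed semantics, not Dynamo's actual implementation.
def check_obj_id(obj, expected_id):    # ID_MATCH: the exact same object, by CPython id()
    return id(obj) == expected_id

def check_type_id(obj, expected_id):   # TYPE_MATCH: exact type identity, by id(type(obj))
    return id(type(obj)) == expected_id

def equals_match(value, expected):     # EQUALS_MATCH: value equality (scaling == 1.0, eps == 1e-06)
    return value == expected

def length_check_empty(container):     # LENGTH_CHECK as used here: container is empty
    return not container

def dict_not_contains(key, d):         # DICT_CONTAINS in its negated form used here
    return key not in d
```

The two object ids that recur on every `.training` guard are consistent with the interned `True`/`False` singletons: because Python booleans are singletons, an ID_MATCH on `module.training` pins each module's train/eval mode into the compiled graph.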
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_k.lora_A['default_0'], 140533112512208) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_k.lora_A['default_0'].weight, 140537318556032) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_k.lora_B, 140533112509856) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_k.lora_B['default_0'], 140533112505344) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_k.base_layer, 140581770193712) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_k.lora_dropout, 140533112513696) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_k.lora_dropout['default_0'], 140533112513408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[9].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[9].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[9].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[9].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[9].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[9].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[9].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
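The pair of TENSOR_ALIASING guards closing each LoRA layer asserts that the layer's `_active_adapter` is literally the same object as the one on `transformer_blocks[0].norm1.linear`, an `is` check rather than an equality check. Spelled out as plain Python (the `model` handle is hypothetical, standing in for the compiled transformer):

```python
# What the TENSOR_ALIASING guards on _active_adapter amount to: every PEFT
# layer holds a reference to one shared object, so a single `is` check per
# layer suffices. `model` is a hypothetical handle, not a name from the log.
def check_active_adapter_aliasing(model):
    anchor = model.transformer_blocks[0].norm1.linear._active_adapter
    assert model.single_transformer_blocks[9].attn.to_k._active_adapter is anchor
    assert model.single_transformer_blocks[8].proj_out._active_adapter is anchor
```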
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_q, 140533112513024) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[9].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_q.lora_A, 140533112508704) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_q.lora_A['default_0'], 140533112510336) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_q.lora_A['default_0'].weight, 140537318558912) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_q.lora_B, 140533112514464) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_q.lora_B['default_0'], 140533112510384) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_q.base_layer, 140581770193808) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_q.lora_dropout, 140533112511920) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_q.lora_dropout['default_0'], 140533112512064) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[9].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[9].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[9].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[9].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[9].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[9].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[9].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
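The to_q/to_k/to_v and norm_q/norm_k guards in this subtree were all installed while tracing the projection step of the Flux attention processor. A short sketch of that step, paraphrased from the lines quoted in the guard comments (attention_processor.py:1716-1729) with the head reshapes elided:

```python
# Sketch of the Flux attention processor's projection step, paraphrased from
# the source lines cited by the guards; not the verbatim diffusers code.
def project_qkv(attn, hidden_states):
    query = attn.to_q(hidden_states)   # :1716 -- LoRA-wrapped Linear, hence the guard subtree
    key = attn.to_k(hidden_states)     # :1717
    value = attn.to_v(hidden_states)   # :1718
    # ... reshape to (batch, heads, seq, head_dim) elided ...
    if attn.norm_q is not None:        # :1727 -- ID_MATCH pins norm_q to one module object
        query = attn.norm_q(query)
    if attn.norm_k is not None:        # :1729
        key = attn.norm_k(key)
    return query, key, value
```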
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_v, 140533112504576) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[9].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_v.lora_A, 140533112506784) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_v.lora_A['default_0'], 140533112505776) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_v.lora_A['default_0'].weight, 140537318444464) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_v.lora_B, 140533112506352) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_v.lora_B['default_0'], 140533112505920) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_v.base_layer, 140581770193856) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_v.lora_dropout, 140533112504432) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_v.lora_dropout['default_0'], 140533112504528) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[9].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[9].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[9].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[9].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[9].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[9].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[9].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.norm_k, 140581770193760) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[9].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[9].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.norm_k.weight, 140581765867824) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.norm_q, 140581770193616) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[9].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH:
L['self'].single_transformer_blocks[9].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.norm_q.weight, 140581773357792) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[9].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[9].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.processor, 140581770193520) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | 
| | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm, 140581770193184) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[9].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.training, 140591004393440) # norm_hidden_states, gate = 
self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.norm, 140581770193328) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.silu, 140581770193232) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.linear, 140533112569664) # emb = self.linear(self.silu(emb)) # 
diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[9].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.linear.lora_A, 140533112555840) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.linear.lora_A['default_0'], 140533112516192) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[9].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.linear.lora_A['default_0'].weight, 140537318568272) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.linear.lora_B, 140533112510288) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.linear.lora_B['default_0'], 140533112515664) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.linear.base_layer, 140581770193280) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.linear.lora_dropout, 140533112568800) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.linear.lora_dropout.training, 
140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.linear.lora_dropout['default_0'], 140533112561312) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[9].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[9].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[9].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[9].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[9].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[9].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[9].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].act_mlp, 140581770193424) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_mlp, 140533112507264) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # 
diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[9].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_mlp.lora_A, 140533112516720) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_mlp.lora_A['default_0'], 140533112516000) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[9].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_mlp.lora_A['default_0'].weight, 140537318564672) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_mlp.lora_B, 140533112516672) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_mlp.lora_B['default_0'], 140533112514896) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[9].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_mlp.base_layer, 140581770193376) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_mlp.lora_dropout, 140533112507216) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- 
GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_mlp.lora_dropout['default_0'], 140533112507360) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[9].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[9].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[9].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[9].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[9].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | 
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[9].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[9].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- 
TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out, 140533112515568) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[9].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out.lora_A, 140533112517584) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out, 140533112515568) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[9].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out.lora_A, 140533112517584) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out.lora_A['default_0'], 140533112515232) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out.lora_A['default_0'].weight, 140537318566912) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out.lora_B, 140533112517632) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out.lora_B['default_0'], 140533112513648) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out.base_layer, 140581770193472) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out.lora_dropout, 140533112516336) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out.lora_dropout['default_0'], 140533112515856) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[9].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[9].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[9].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[9].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[9].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[9].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[9].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
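Each LoRA-wrapped Linear in this tree is guarded along the identical code path, so the guard block just completed for proj_out repeats for every adapted projection below. Stitching together the source lines the guard comments quote from peft/tuners/lora/layer.py (557-568) gives roughly the following paraphrase of the traced branch of peft's lora.Linear.forward (a sketch assembled from those quoted lines, not the verbatim PEFT source; the merge/disable branches are elided):

    def forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)      # layer.py:557
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():  # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]          # layer.py:562
            lora_B = self.lora_B[active_adapter]          # layer.py:563
            dropout = self.lora_dropout[active_adapter]   # layer.py:564
            scaling = self.scaling[active_adapter]        # layer.py:565
            x = x.to(lora_A.weight.dtype)                 # layer.py:566
            if not self.use_dora[active_adapter]:         # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Every attribute read in that function materializes as one GuardManager node, which is why a single adapted Linear contributes well over a dozen guards.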
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10], accessed_by=GetItemGuardAccessor(10)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10], 140581770193136) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[10].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn, 140581770194336) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[10].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_k, 140533112601136) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[10].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_k.lora_A, 140533112600272) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_k.lora_A['default_0'], 140533112600608) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_k.lora_A['default_0'].weight, 140537318446704) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
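The ID_MATCH on lora_A['default_0'].weight pins the Parameter object itself (it is only read for its dtype at layer.py:566), not the tensor's values: in-place updates keep the guard valid, while rebinding the attribute invalidates it. A hypothetical illustration (lin and transformer are shorthand for the guarded sub-layer and the compiled module, not names from this log):

    import torch
    lin = transformer.single_transformer_blocks[10].attn.to_k.lora_A["default_0"]
    lin.weight.data.mul_(0.9)   # same Parameter object: the weight ID_MATCH still passes
    lin.weight = torch.nn.Parameter(lin.weight.data.clone())
    # new object, new id(): the weight ID_MATCH fails and the next call recompiles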
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_k.lora_B, 140533112595088) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_k.lora_B['default_0'], 140533112594992) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_k.base_layer, 140581770194480) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_k.lora_dropout, 140533112590720) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_k.lora_dropout['default_0'], 140533112593696) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[10].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[10].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[10].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[10].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[10].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k._forward_hooks, 
accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[10].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[10].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[10].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[10].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: 
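The TENSOR_ALIASING pair that closes each adapted module asserts that every layer still shares the single _active_adapter list first guarded on transformer_blocks[0].norm1.linear, while DICT_LENGTH == 1 and EQUALS_MATCH == 1.0 freeze the adapter count and its scale. Changing any of this after compilation fails the guards and forces a recompile on the next forward; for example (hypothetical usage, assuming pipe is the Flux pipeline this transformer belongs to and the standard diffusers set_adapters API):

    pipe.transformer = torch.compile(pipe.transformer)
    image = pipe("a cat", num_inference_steps=4).images[0]   # compiles, installs the guards above
    pipe.set_adapters(["default_0"], adapter_weights=[0.5])  # scaling['default_0'] becomes 0.5
    image = pipe("a cat", num_inference_steps=4).images[0]   # EQUALS_MATCH == 1.0 fails
    # -> Dynamo discards the cached graph for this frame and recompiles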
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_q, 140533116467856) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[10].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_q.lora_A, 140533116461424) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_q.lora_A['default_0'], 140533116460080) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_q.lora_A['default_0'].weight, 140537318450864) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_q.lora_B, 140533116463152) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_q.lora_B['default_0'], 140533116459216) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_q.base_layer, 140581770194576) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_q.lora_dropout, 140533116466560) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_q.lora_dropout['default_0'], 140533116465792) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[10].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[10].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[10].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[10].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[10].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[10].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[10].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[10].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[10].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
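The to_q, to_k and to_v subtrees are three copies of the same LoRA-Linear guard pattern because the attention processor the comments cite projects hidden_states three times; paraphrasing the quoted lines from diffusers/src/diffusers/models/attention_processor.py (the rest of the processor is elided here):

    query = attn.to_q(hidden_states)   # attention_processor.py:1716
    key = attn.to_k(hidden_states)     # attention_processor.py:1717
    value = attn.to_v(hidden_states)   # attention_processor.py:1718
    # each projection dispatches through the peft Linear.forward sketched
    # earlier, so each contributes its own lora_A/lora_B/scaling/use_dora block

The to_v guards below complete the same checklist already seen for to_k and to_q.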
[__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_v.lora_A['default_0'], 140533112601664) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_v.lora_A['default_0'].weight, 140537320372512) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_v.lora_B, 140533112600320) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.lora_B.__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_v.lora_B['default_0'], 140533112596000) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_v.base_layer, 140581770194624) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_v.lora_dropout, 140533112594800) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_v.lora_dropout['default_0'], 140533112590192) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[10].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[10].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # 
peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[10].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[10].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[10].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[10].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[10].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v._disable_adapters, 
accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[10].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[10].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.norm_k, 140581770194528) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[10].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[10].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.norm_k.weight, 140581766061632) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.norm_q, 140581770194384) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[10].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[10].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.norm_q.weight, 140581765892352) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[10].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[10].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.processor, 140581770194288) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm, 140581770193952) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[10].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.norm, 140581770194096) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.silu, 140581770194000) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.linear, 140533112504816) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[10].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.linear.lora_A, 140533115555216) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.linear.lora_A['default_0'], 140533115564816) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.linear.lora_A['default_0'].weight, 140537318449504) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.linear.lora_B, 140533115562944) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[10].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.linear.lora_B['default_0'], 140533115564288) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.linear.base_layer, 140581770194048) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.linear.lora_dropout, 140533115563088) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.linear.lora_dropout['default_0'], 140533112505440) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[10].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[10].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[10].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[10].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[10].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[10].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[10].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[10].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[10].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].act_mlp, 140581770194192) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].act_mlp.__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_mlp, 140533115565104) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[10].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_mlp.lora_A, 140533115555264) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[10].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
(below, P = L['self'].single_transformer_blocks[10].proj_mlp)
| +- ID_MATCH: ___check_obj_id(P.lora_A.training, 140591004393408)  # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
+- GuardManager: source=P.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| +- ID_MATCH: ___check_obj_id(P.lora_A['default_0'], 140533115561888)  # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| +- GuardManager: source=P.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | +- GuardManager: source=P.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | +- ID_MATCH: ___check_obj_id(P.lora_A['default_0'].training, 140591004393408)  # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | +- GuardManager: source=P.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | +- GuardManager: source=P.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | +- ID_MATCH: ___check_obj_id(P.lora_A['default_0'].weight, 140537318452224)  # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
+- GuardManager: source=P.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| +- ID_MATCH: ___check_obj_id(P.lora_B, 140533115552672)  # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| +- GuardManager: source=P.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | +- GuardManager: source=P.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | +- ID_MATCH: ___check_obj_id(P.lora_B.training, 140591004393408)  # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| +- GuardManager: source=P.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | +- ID_MATCH: ___check_obj_id(P.lora_B['default_0'], 140533115561744)  # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | +- GuardManager: source=P.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | +- GuardManager: source=P.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | +- ID_MATCH: ___check_obj_id(P.lora_B['default_0'].training, 140591004393408)  # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
+- GuardManager: source=P.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| +- ID_MATCH: ___check_obj_id(P.base_layer, 140581770194144)  # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| +- GuardManager: source=P.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | +- GuardManager: source=P.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | +- ID_MATCH: ___check_obj_id(P.base_layer.training, 140591004393440)  # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
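Records like the ones above come from Dynamo's guard logging; the [0/1] tag each raw record carries marks frame 0, compile attempt 1, i.e. this tree was built for the first recompile of the transformer's forward frame. A minimal sketch of how to reproduce such a dump, assuming a recent PyTorch 2.x (the boolean artifact names are the documented torch._logging ones):

# Minimal sketch: enable the guard / recompile logging that produces dumps
# like this one (equivalent to running with TORCH_LOGS="guards,recompiles").
import torch
import torch._logging

torch._logging.set_logs(guards=True, recompiles=True)

@torch.compile
def f(x):
    return x * 2

f(torch.randn(4))  # a TREE_GUARD_MANAGER dump is printed per compiled frame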
+- GuardManager: source=P.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| +- ID_MATCH: ___check_obj_id(P.lora_dropout, 140533115562896)  # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| +- GuardManager: source=P.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | +- GuardManager: source=P.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | +- ID_MATCH: ___check_obj_id(P.lora_dropout.training, 140591004393408)  # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| +- GuardManager: source=P.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | +- ID_MATCH: ___check_obj_id(P.lora_dropout['default_0'], 140533115557568)  # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | +- GuardManager: source=P.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | +- GuardManager: source=P.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | +- ID_MATCH: ___check_obj_id(P.lora_dropout['default_0'].training, 140591004393408)  # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
+- GuardManager: source=P.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| +- TYPE_MATCH: ___check_type_id(P.scaling, 140591004466944)  # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| +- DICT_LENGTH: len(P.scaling) == 1  # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| +- GuardManager: source=P.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | +- EQUALS_MATCH: P.scaling['default_0'] == 1.0  # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
+- GuardManager: source=P.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| +- TYPE_MATCH: ___check_type_id(P.use_dora, 140591004466944)  # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| +- DICT_LENGTH: len(P.use_dora) == 1  # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| +- GuardManager: source=P.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | +- ID_MATCH: ___check_obj_id(P.use_dora['default_0'], 140591004393440)  # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
+- GuardManager: source=P._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
+- GuardManager: source=P._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
+- GuardManager: source=P.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| +- TYPE_MATCH: ___check_type_id(P.merged_adapters, 140591004458752)  # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| +- LENGTH_CHECK: not P.merged_adapters  # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
+- GuardManager: source=P._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| +- ID_MATCH: ___check_obj_id(P._disable_adapters, 140591004393440)  # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
+- GuardManager: source=P._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
+- GuardManager: source=P._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
+- GuardManager: source=P._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is P._active_adapter  # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
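Every guard in the proj_mlp sub-tree above corresponds one-to-one to an attribute or dict access in PEFT's LoRA linear forward, per the source comments on the records (peft/tuners/lora/layer.py:557-568). A self-contained sketch of that path, paraphrased from the cited lines; LoRALinearSketch is illustrative, not PEFT's actual class, and details may differ across peft versions:

# Sketch of the code path these guards specialize on. Each dict/attribute
# access below is what Dynamo turns into an ID_MATCH / TYPE_MATCH /
# DICT_LENGTH / EQUALS_MATCH guard in the tree above.
import torch
import torch.nn as nn

class LoRALinearSketch(nn.Module):
    def __init__(self, base: nn.Linear, r: int = 16, lora_alpha: int = 16):
        super().__init__()
        self.base_layer = base                                    # guarded via layer.py:557
        self.lora_A = nn.ModuleDict({"default_0": nn.Linear(base.in_features, r, bias=False)})
        self.lora_B = nn.ModuleDict({"default_0": nn.Linear(r, base.out_features, bias=False)})
        self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})
        self.scaling = {"default_0": lora_alpha / r}              # EQUALS_MATCH == 1.0 when alpha == r
        self.use_dora = {"default_0": False}                      # ID_MATCH on the False singleton
        self.active_adapters = ["default_0"]

    def forward(self, x):
        result = self.base_layer(x)                               # layer.py:557
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():          # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]                  # layer.py:562
            lora_B = self.lora_B[active_adapter]                  # layer.py:563
            dropout = self.lora_dropout[active_adapter]           # layer.py:564
            scaling = self.scaling[active_adapter]                # layer.py:565
            x = x.to(lora_A.weight.dtype)                         # layer.py:566
            if not self.use_dora[active_adapter]:                 # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

y = LoRALinearSketch(nn.Linear(8, 8))(torch.randn(2, 8))  # plain eager usage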
+- GuardManager: source=L['self'].single_transformer_blocks[10].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)   (Q = L['self'].single_transformer_blocks[10].proj_out below)
| +- ID_MATCH: ___check_obj_id(Q, 140533115553008)  # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| +- DICT_CONTAINS: not ___dict_contains('forward', Q.__dict__)  # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| +- ID_MATCH: ___check_obj_id(Q.training, 140591004393408)  # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| +- [same LoRA guard sub-tree, source comments and .training ids as proj_mlp above, with per-object ids:]
|    lora_A 140533115554592, lora_A['default_0'] 140533116457968, lora_A['default_0'].weight 140537318457104
|    lora_B 140533115563040, lora_B['default_0'] 140533116462816
|    base_layer 140581770194240 (base_layer.training 140591004393440)
|    lora_dropout 140533115552624, lora_dropout['default_0'] 140533115558576
|    scaling: TYPE_MATCH 140591004466944 / DICT_LENGTH len == 1 / EQUALS_MATCH scaling['default_0'] == 1.0
|    use_dora: TYPE_MATCH 140591004466944 / DICT_LENGTH len == 1 / ID_MATCH use_dora['default_0'] 140591004393440
|    merged_adapters: TYPE_MATCH 140591004458752 / LENGTH_CHECK not Q.merged_adapters
|    _disable_adapters: ID_MATCH 140591004393440
|    _active_adapter: TENSOR_ALIASING with L['self'].transformer_blocks[0].norm1.linear._active_adapter
|    (plus GuardManagers with no child guards for _forward_hooks, _backward_hooks, _forward_pre_hooks, _backward_pre_hooks)
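The EQUALS_MATCH guards pin scaling['default_0'] to 1.0 on every adapted layer. In PEFT, a LoRA adapter's scaling is derived from its config, typically lora_alpha / r (or lora_alpha / sqrt(r) when rslora is enabled), so a guarded value of 1.0 suggests this checkpoint was configured with lora_alpha == r. The config values below are hypothetical; only the resulting ratio is what the guard checks:

# Hypothetical adapter hyperparameters reproducing the guarded value.
r, lora_alpha = 16, 16
scaling = lora_alpha / r
assert scaling == 1.0  # matches EQUALS_MATCH: ...scaling['default_0'] == 1.0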
+- GuardManager: source=L['self'].single_transformer_blocks[10]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
+- GuardManager: source=L['self'].single_transformer_blocks[10]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
+- GuardManager: source=L['self'].single_transformer_blocks[10]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
+- GuardManager: source=L['self'].single_transformer_blocks[10]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
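For reference, the guard kinds appearing throughout this dump reduce to cheap Python-level checks against the exact objects seen at trace time. A rough, illustrative rendering; the real implementations live in torch/_dynamo/guards.py and its C++ guard accessors, and these helpers are sketches, not the actual API:

# Illustrative semantics of the guard primitives in this dump.
def ID_MATCH(obj, expected_id):            # same object identity; also used for
    return id(obj) == expected_id          # bool singletons like module.training

def TYPE_MATCH(obj, expected_type_id):     # exact type match, not isinstance
    return id(type(obj)) == expected_type_id

def EQUALS_MATCH(obj, value):              # compare by value, e.g. scaling == 1.0
    return obj == value

def DICT_LENGTH(d, n):                     # e.g. len(self.scaling) == 1
    return len(d) == n

def LENGTH_CHECK(container):               # e.g. `not merged_adapters` (empty)
    return not container

def DICT_CONTAINS(d, key, expected):       # e.g. no per-instance 'forward'
    return (key in d) is expected          # override in module.__dict__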
+- GuardManager: source=L['self'].single_transformer_blocks[11], accessed_by=GetItemGuardAccessor(11)
| +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11], 140581770193904)  # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| +- GuardManager: source=L['self'].single_transformer_blocks[11].__dict__, accessed_by=GetGenericDictGuardAccessor
| | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[11].__dict__)  # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | +- GuardManager: source=L['self'].single_transformer_blocks[11].training, accessed_by=DictGetItemGuardAccessor(training)
| | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].training, 140591004393440)  # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | +- GuardManager: source=L['self'].single_transformer_blocks[11]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn, 140581770195152)  # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[11].attn.__dict__)  # nn/modules/module.py:1556 in _call_impl
| | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.training, 140591004393440)  # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
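The to_k and to_q guard sub-trees that follow anchor to the query/key projection calls inside the Flux attention processor (attention_processor.py:1716-1717 in the record comments). Schematically, as a simplified sketch rather than the actual processor code:

# Simplified sketch of the projection calls the following guards attach to.
def flux_attention_prologue(attn, hidden_states):
    query = attn.to_q(hidden_states)  # attention_processor.py:1716 -> to_q guards
    key = attn.to_k(hidden_states)    # attention_processor.py:1717 -> to_k guards
    # ...value projection, head reshaping, QK norm, RoPE and
    # scaled-dot-product attention follow in the real processor
    return query, key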
+- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)   (K = L['self'].single_transformer_blocks[11].attn.to_k below)
| +- ID_MATCH: ___check_obj_id(K, 140533118463728)  # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| +- DICT_CONTAINS: not ___dict_contains('forward', K.__dict__)  # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| +- ID_MATCH: ___check_obj_id(K.training, 140591004393408)  # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| +- [same LoRA guard sub-tree, source comments and .training ids as proj_mlp above, with per-object ids:]
|    lora_A 140533118538256, lora_A['default_0'] 140533118545936, lora_A['default_0'].weight 140537320238800
|    lora_B 140533118543632, lora_B['default_0'] 140533118548192
|    base_layer 140581770195344 (base_layer.training 140591004393440)
|    lora_dropout 140533118457248, lora_dropout['default_0'] 140533118460800
|    scaling: TYPE_MATCH 140591004466944 / DICT_LENGTH len == 1 / EQUALS_MATCH scaling['default_0'] == 1.0
|    use_dora: TYPE_MATCH 140591004466944 / DICT_LENGTH len == 1 / ID_MATCH use_dora['default_0'] 140591004393440
|    merged_adapters: TYPE_MATCH 140591004458752 / LENGTH_CHECK not K.merged_adapters
|    _disable_adapters: ID_MATCH 140591004393440
|    _active_adapter: TENSOR_ALIASING with L['self'].transformer_blocks[0].norm1.linear._active_adapter
|    (plus GuardManagers with no child guards for _forward_hooks, _backward_hooks, _forward_pre_hooks, _backward_pre_hooks)
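Despite the name, the TENSOR_ALIASING entries here assert object identity between two guard sources: each adapted module's _active_adapter must be the very same object Dynamo saw on transformer_blocks[0].norm1.linear at trace time. Identity, not equality, is what is checked; an illustration with hypothetical values:

# Aliasing passes only for the same object, never for an equal copy.
shared = ["default_0"]
alias = shared          # same object: would satisfy the aliasing guard
copy_ = list(shared)    # equal value, distinct object: would fail it
assert alias is shared
assert copy_ == shared and copy_ is not shared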
+- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)   (T = L['self'].single_transformer_blocks[11].attn.to_q below)
| +- ID_MATCH: ___check_obj_id(T, 140533118466272)  # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| +- DICT_CONTAINS: not ___dict_contains('forward', T.__dict__)  # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| +- ID_MATCH: ___check_obj_id(T.training, 140591004393408)  # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| +- [same LoRA guard sub-tree, source comments and .training ids as proj_mlp above, with per-object ids:]
|    lora_A 140533118459744, lora_A['default_0'] 140533118459216, lora_A['default_0'].weight 140537320360752
|    lora_B 140533118465792, lora_B['default_0'] 140533118458688
|    base_layer 140581770195440 (base_layer.training 140591004393440)
|    lora_dropout 140533118462864, lora_dropout['default_0'] 140533118457344
|    scaling: TYPE_MATCH ___check_type_id(T.scaling, 140591004466944)  # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[11].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[11].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[11].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[11].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[11].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[11].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged 
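
The to_q guards running through here, and the structurally identical to_v set that follows, all trace back to the same handful of source lines: peft/tuners/lora/layer.py:557-568, the adapter dispatch in peft's LoRA Linear.forward. Dynamo has to pin down every dict lookup and boolean branch on that path, which is why a single wrapped Linear contributes a dozen-plus entries (ID_MATCH on lora_A/lora_B and their 'default_0' submodules, DICT_LENGTH/EQUALS_MATCH on scaling, TYPE_MATCH plus ID_MATCH on use_dora, LENGTH_CHECK on merged_adapters). A minimal sketch of that control flow, paraphrased from the guard comments rather than copied from peft (MiniLoraLinear and its fixed active_adapters list are illustrative):

import torch
import torch.nn as nn

class MiniLoraLinear(nn.Module):
    # Illustrative stand-in for peft.tuners.lora.Linear; attribute names match
    # the guard sources above (lora_A, lora_B, scaling, use_dora, merged_adapters).
    def __init__(self, base: nn.Linear, r: int = 16, adapter: str = "default_0"):
        super().__init__()
        self.base_layer = base
        self.lora_A = nn.ModuleDict({adapter: nn.Linear(base.in_features, r, bias=False)})
        self.lora_B = nn.ModuleDict({adapter: nn.Linear(r, base.out_features, bias=False)})
        self.lora_dropout = nn.ModuleDict({adapter: nn.Identity()})  # Identity when dropout == 0
        self.scaling = {adapter: 1.0}      # DICT_LENGTH == 1, EQUALS_MATCH == 1.0
        self.use_dora = {adapter: False}   # TYPE_MATCH(dict), ID_MATCH on False
        self.merged_adapters = []          # LENGTH_CHECK: not merged_adapters
        self.active_adapters = [adapter]   # one shared object -> the TENSOR_ALIASING guards

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        result = self.base_layer(x)                          # layer.py:557
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():     # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]             # layer.py:562
            lora_B = self.lora_B[active_adapter]             # layer.py:563
            dropout = self.lora_dropout[active_adapter]      # layer.py:564
            scaling = self.scaling[active_adapter]           # layer.py:565
            x = x.to(lora_A.weight.dtype)                    # layer.py:566
            if not self.use_dora[active_adapter]:            # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

The paired TENSOR_ALIASING entries on _active_adapter record that every wrapped layer shares one adapter-name list with transformer_blocks[0].norm1.linear, so dynamo asserts object identity once per consumer instead of re-checking the list's contents.
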
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[11].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[11].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_v, 140533118538112) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[11].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_v.lora_A, 140533118542144) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_v.lora_A['default_0'], 140533118545168) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_v.lora_A['default_0'].weight, 140537320242880) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_v.lora_B, 140533118542960) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_v.lora_B['default_0'], 140533118536192) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.base_layer, 
accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_v.base_layer, 140581770195488) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_v.lora_dropout, 140533118538592) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_v.lora_dropout['default_0'], 140533118540368) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | 
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[11].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[11].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[11].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[11].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[11].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v._forward_hooks, 
accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[11].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[11].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[11].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[11].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[11].attn.norm_k, 140581770195392) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[11].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[11].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.norm_k.weight, 140581783350912) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.norm_q, 140581770195200) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[11].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[11].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.norm_q.weight, 140581783354592) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) 
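
norm_q and norm_k are far cheaper to guard: an EQUALS_MATCH pinning eps == 1e-06 and an ID_MATCH on the exact weight object, which together freeze both statements of the RMSNorm forward cited at diffusers/src/diffusers/models/normalization.py:428-430. A minimal sketch reconstructed from those two guarded lines (hedged: the real diffusers class also handles half-precision weight casting and an optional bias):

import torch
import torch.nn as nn

class MiniRMSNorm(nn.Module):
    def __init__(self, dim: int, eps: float = 1e-6, elementwise_affine: bool = True):
        super().__init__()
        self.eps = eps  # guarded via EQUALS_MATCH == 1e-06
        self.weight = nn.Parameter(torch.ones(dim)) if elementwise_affine else None

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        input_dtype = hidden_states.dtype
        # variance + rsqrt: the statement guarded at normalization.py:428
        variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states.to(torch.float32) * torch.rsqrt(variance + self.eps)
        if self.weight is not None:  # branch pinned by the ID_MATCH at normalization.py:430
            hidden_states = hidden_states * self.weight
        return hidden_states.to(input_dtype)
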
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[11].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[11].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.processor, 140581770195104) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[11].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm, 140581770194720) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[11].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.norm, 140581770194864) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | 
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.silu, 140581770194768) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.linear, 140533112597968) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[11].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.linear.lora_A, 140533112596960) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.linear.lora_A['default_0'], 140533112590384) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.lora_A['default_0'].weight, 
accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.linear.lora_A['default_0'].weight, 140537320368992) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.linear.lora_B, 140533112596576) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.linear.lora_B['default_0'], 140533112591440) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.linear.base_layer, 140581770194816) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.linear.lora_dropout, 140533112598736) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.linear.lora_dropout['default_0'], 140533112602288) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[11].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[11].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[11].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[11].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[11].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[11].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[11].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[11].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[11].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].act_mlp, 140581770195008) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_mlp, 140533112600944) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[11].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_mlp.lora_A, 140533112600848) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_mlp.lora_A['default_0'], 140533112598592) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_mlp.lora_A['default_0'].weight, 140537320359712) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_mlp.lora_B, 140533112587024) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_mlp.lora_B['default_0'], 140533112599120) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_mlp.base_layer, 140581770194912) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_mlp.lora_dropout, 140533112594032) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_mlp.lora_dropout['default_0'], 140533112595280) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[11].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[11].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[11].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[11].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[11].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[11].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[11].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[11].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[11].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_out, 140533118464448) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[11].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_out.lora_A, 140533118463296) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_out.lora_A['default_0'], 140533118458160) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_out.lora_A['default_0'].weight, 140537320369952) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_out.lora_B, 140533118461376) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_out.lora_B['default_0'], 140533118463152) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_out.base_layer, 140581770195056) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_out.lora_dropout, 140533118465072) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_out.lora_dropout['default_0'], 140533118453792) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[11].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[11].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[11].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[11].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[11].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[11].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[11].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[11].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[11].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12], accessed_by=GetItemGuardAccessor(12)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12], 140581770194672) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[12].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn, accessed_by=DictGetItemGuardAccessor(attn)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn, 140581770195968) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[12].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_k, 140533119737360) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[12].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_k.lora_A, 140533118180736) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_k.lora_A['default_0'], 140533118188176) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_k.lora_A['default_0'].weight, 140537320229360) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_k.lora_B, 140533118180304) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_k.lora_B['default_0'], 140533118187888) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_k.base_layer, 140581770196112) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_k.lora_dropout, 140533119732656) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_k.lora_dropout['default_0'], 140533119743552) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[12].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[12].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[12].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[12].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[12].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[12].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not
L['self'].single_transformer_blocks[12].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[12].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[12].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_q, 140533118407376) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[12].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_q.lora_A, 140533119736448) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_q.lora_A['default_0'], 140533119734336) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[12].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_q.lora_A['default_0'].weight, 140537320235920) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_q.lora_B, 140533119744848) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_q.lora_B['default_0'], 140533119745424) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_q.base_layer, 140581770196208) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_q.lora_dropout, 140533118407952) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_q.lora_dropout['default_0'], 140533118409296) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[12].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[12].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[12].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[12].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[12].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[12].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 
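Note on reading the recurring checks in this dump: they are identity and shape pins on every Python object the traced forward touched, and if any one of them fails on a later call, Dynamo falls back and recompiles. In plain Python the common kinds amount to roughly the following (a simplified sketch; the real checks run as compiled fastpaths inside the guard tree, and only the names ___check_obj_id / ___check_type_id are taken from the log itself):

    # Simplified semantics of the guard kinds that recur above.
    def id_match(obj, expected_id):           # ID_MATCH / ___check_obj_id
        return id(obj) == expected_id         # same object, not merely equal
    def type_match(obj, expected_type_id):    # TYPE_MATCH / ___check_type_id
        return id(type(obj)) == expected_type_id
    def equals_match(value, expected):        # EQUALS_MATCH
        return value == expected              # e.g. scaling['default_0'] == 1.0
    def dict_length(d, n):                    # DICT_LENGTH
        return len(d) == n
    def length_check_empty(seq):              # LENGTH_CHECK: not <seq>
        return not seq                        # e.g. merged_adapters stays empty

The long integers are the expected id() values from this process; the two that recur on the boolean attributes (140591004393408 and 140591004393440) are consistent with id(True) and id(False), which is why flipping a module's training flag or a use_dora entry invalidates the compiled graph.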
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[12].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[12].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[12].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[12].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[12].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_v, 140533118180064) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[12].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_v.lora_A, 140533118182896) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | 
+- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_v.lora_A['default_0'], 140533118187504) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_v.lora_A['default_0'].weight, 140537320052096) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_v.lora_B, 140533118181840) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_v.lora_B['default_0'], 140533118186400) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_v.base_layer, 140581770196256) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_v.lora_dropout, 140533118187984) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[12].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_v.lora_dropout['default_0'], 140533118174256) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[12].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[12].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[12].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 
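Note: the guard subtree being built here for attn.to_v is structurally identical to the ones just emitted for attn.to_k and attn.to_q, and the same block repeats for every LoRA-wrapped linear in the model. All of it falls out of attribute reads in peft's LoRA forward, whose lines the guard comments quote (peft/tuners/lora/layer.py:557-568). The following self-contained sketch reconstructs that path from the quoted lines only; the class shell, the loop header and the final accumulation are assumptions for illustration, not peft's actual code:

    import torch
    import torch.nn as nn

    class LoRALinearSketch(nn.Module):
        # Hypothetical stand-in for peft's lora.Linear; attribute names
        # mirror the guarded ones (lora_A, lora_B, scaling, use_dora, ...).
        def __init__(self, base: nn.Linear, r: int = 16):
            super().__init__()
            self.base_layer = base
            self.lora_A = nn.ModuleDict({"default_0": nn.Linear(base.in_features, r, bias=False)})
            self.lora_B = nn.ModuleDict({"default_0": nn.Linear(r, base.out_features, bias=False)})
            self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})
            self.scaling = {"default_0": 1.0}      # EQUALS_MATCH pins this == 1.0
            self.use_dora = {"default_0": False}   # ID_MATCH pins this to False
            self.merged_adapters = []              # LENGTH_CHECK pins this empty
            self.active_adapters = ["default_0"]   # _active_adapter in the guards

        def forward(self, x):
            result = self.base_layer(x)                        # layer.py:557
            for active_adapter in self.active_adapters:
                if active_adapter not in self.lora_A.keys():   # layer.py:560
                    continue
                lora_A = self.lora_A[active_adapter]           # layer.py:562
                lora_B = self.lora_B[active_adapter]           # layer.py:563
                dropout = self.lora_dropout[active_adapter]    # layer.py:564
                scaling = self.scaling[active_adapter]         # layer.py:565
                x = x.to(lora_A.weight.dtype)                  # layer.py:566
                if not self.use_dora[active_adapter]:          # layer.py:568
                    result = result + lora_B(lora_A(dropout(x))) * scaling
            return result

    out = LoRALinearSketch(nn.Linear(64, 64))(torch.randn(2, 64))

Every dict lookup and flag test in this path is a separate Python-level read, so Dynamo pins each with its own guard and the guard count grows roughly linearly with the number of wrapped projections. Because scaling['default_0'] is pinned with EQUALS_MATCH == 1.0, running with any LoRA scale other than 1.0 falsifies that guard and forces a recompile.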
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[12].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[12].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[12].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[12].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v._backward_pre_hooks, 
accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[12].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[12].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.norm_k, 140581770196160) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[12].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[12].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.norm_k.weight, 140581783352912) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.norm_q, 140581770196016) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[12].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[12].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.norm_q.weight, 140581783352432) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[12].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[12].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.processor, 140581770195920) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.forward, accessed_by=FuncDefaultsGuardAccessor
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm, 140581770195584) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[12].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.norm, 140581770195728) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.silu, 140581770195632) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.linear, 140533118549584) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[12].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.linear.lora_A, 140533118547424) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.linear.lora_A['default_0'], 140533118533696) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.linear.lora_A['default_0'].weight, 140537320241120) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.linear.lora_B, 140533118548816) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.linear.lora_B['default_0'], 140533118540752) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.linear.base_layer, 140581770195680) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.linear.lora_dropout, 140533118549776) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.linear.lora_dropout['default_0'], 140533118548240) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[12].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[12].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[12].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[12].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[12].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[12].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[12].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[12].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].act_mlp, 140581770195824) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_mlp, 140533118539792) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[12].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_mlp.lora_A, 140533118540080) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_mlp.lora_A['default_0'], 140533118410400) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_mlp.lora_A['default_0'].weight, 140537320233920) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_mlp.lora_B, 140533125706656) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_mlp.lora_B['default_0'], 140533118410496) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_mlp.base_layer, 140581770195776) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_mlp.lora_dropout, 140533118536288) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_mlp.lora_dropout['default_0'], 140533118542000) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[12].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[12].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[12].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[12].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[12].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[12].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[12].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[12].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_out, 140533118417984) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[12].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_out.lora_A, 140533118417936) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_out.lora_A['default_0'], 140533118414000) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_out.lora_A['default_0'].weight, 140537320232240) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_out.lora_B, 140533118416448) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_out.lora_B['default_0'], 140533118408096) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_out.base_layer, 140581770195872) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_out.lora_dropout, 140533118413664) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_out.lora_dropout['default_0'], 140533118417600) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[12].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[12].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[12].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[12].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[12].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[12].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[12].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[12].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13], accessed_by=GetItemGuardAccessor(13)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13], 140581770195536) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[13].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn, 140581770196736) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[13].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_k, 140533118035008) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[13].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- 
GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_k.lora_A, 140533118035920) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_k.lora_A['default_0'], 140533118038032) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_k.lora_A['default_0'].weight, 140537320054576) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_k.lora_B, 140533118035632) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_k.lora_B['default_0'], 140533118040336) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = 
self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_k.base_layer, 140581770196880) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_k.lora_dropout, 140533118028576) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_k.lora_dropout['default_0'], 140533118036400) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | 
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[13].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[13].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[13].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[13].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[13].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # 
peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[13].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[13].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[13].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[13].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- 
GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_q, 140533118069312) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[13].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_q.lora_A, 140533118062976) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | 
| | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_q.lora_A['default_0'], 140533118034576) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_q.lora_A['default_0'].weight, 140537320056176) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_q.lora_B, 140533118067776) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_q.lora_B['default_0'], 140533118031360) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_q.base_layer, 140581770196976) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_q.lora_dropout, 140533118064896) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[13].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_q.lora_dropout['default_0'], 140533118069408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[13].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[13].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[13].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[13].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[13].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[13].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[13].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q._backward_pre_hooks, 
accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[13].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[13].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_v, 140533118038464) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[13].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_v.lora_A, 140533118035296) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_v.lora_A['default_0'], 140533118032704) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_v.lora_A['default_0'].weight, 140537319878592) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_v.lora_B, 140533118041056) # lora_B = 
self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_v.lora_B['default_0'], 140533118040672) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_v.base_layer, 140581770197024) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_v.lora_dropout, 140533118037936) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_v.lora_dropout['default_0'], 140533118038416) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[13].attn.to_v.scaling, 140591004466944) # scaling = 
self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[13].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[13].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[13].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[13].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[13].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not 
L['self'].single_transformer_blocks[13].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[13].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[13].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.norm_k, 140581770196928) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[13].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.norm_k.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[13].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.norm_k.weight, 140581772773872) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.norm_q, 140581770196784) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.norm_q.__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[13].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[13].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.norm_q.weight, 140581772771872) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[13].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[13].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[13].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.processor, 140581770196688) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm, 
accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].norm, 140581770196352) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[13].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].norm.norm, 140581770196496) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[13].norm.silu, 140581770196400) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].norm.linear, 140533118095792) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[13].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].norm.linear.lora_A, 140533118101504) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] 
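
The guard cluster running through this stretch of the dump is the pattern Dynamo emits for every PEFT LoRA-wrapped Linear in the model: ID_MATCH pins the lora_A/lora_B/lora_dropout ModuleDicts and their 'default_0' entries, TYPE_MATCH plus DICT_LENGTH pin the scaling and use_dora dicts, EQUALS_MATCH pins scaling['default_0'] == 1.0, LENGTH_CHECK asserts merged_adapters is empty, and TENSOR_ALIASING ties each layer's _active_adapter back to the same shared object. All of these correspond to attribute reads in peft/tuners/lora/layer.py:557-568, the LoRA Linear.forward that the source comments quote. Below is a minimal, self-contained sketch of that code path — a hypothetical toy class reconstructed from those quoted lines, not the real PEFT implementation (whose exact body varies by version) — showing why one LoRA Linear contributes a dozen-plus guard nodes:

    import torch
    import torch.nn as nn

    class ToyLoraLinear(nn.Module):
        # Stand-in with just enough attributes to mirror the reads guarded above.
        def __init__(self, in_f, out_f, r=4):
            super().__init__()
            self.base_layer = nn.Linear(in_f, out_f)
            self.lora_A = nn.ModuleDict({"default_0": nn.Linear(in_f, r, bias=False)})
            self.lora_B = nn.ModuleDict({"default_0": nn.Linear(r, out_f, bias=False)})
            self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})
            self.scaling = {"default_0": 1.0}     # guarded via EQUALS_MATCH == 1.0
            self.use_dora = {"default_0": False}  # guarded via TYPE_MATCH + ID_MATCH
            self.merged_adapters = []             # guarded via LENGTH_CHECK (empty)
            self._active_adapter = ["default_0"]  # guarded via TENSOR_ALIASING

        def forward(self, x):
            # Reconstruction of peft/tuners/lora/layer.py:557-568 from the
            # source comments quoted in the guard records.
            result = self.base_layer(x)                      # :557
            for active_adapter in self._active_adapter:
                if active_adapter not in self.lora_A.keys(): # :560
                    continue
                lora_A = self.lora_A[active_adapter]         # :562
                lora_B = self.lora_B[active_adapter]         # :563
                dropout = self.lora_dropout[active_adapter]  # :564
                scaling = self.scaling[active_adapter]       # :565
                x = x.to(lora_A.weight.dtype)                # :566
                if not self.use_dora[active_adapter]:        # :568
                    # plain LoRA: low-rank update added onto the base output
                    result = result + lora_B(lora_A(dropout(x))) * scaling
            return result

    compiled = torch.compile(ToyLoraLinear(8, 8))
    compiled(torch.randn(2, 8))  # installs an analogous (much smaller) guard tree

Every dict lookup and attribute access in that loop becomes one guard node in this dump, which is why a full FLUX transformer with adapters on every projection produces thousands of them.
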
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].norm.linear.lora_A['default_0'], 140533118103472) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].norm.linear.lora_A['default_0'].weight, 140537320062896) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].norm.linear.lora_B, 140533118095600) # lora_B = 
self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].norm.linear.lora_B['default_0'], 140533118103184) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].norm.linear.base_layer, 140581770196448) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[13].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].norm.linear.lora_dropout, 140533118101840) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].norm.linear.lora_dropout['default_0'], 140533118102752) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[13].norm.linear.scaling, 
140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[13].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[13].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[13].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[13].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[13].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- 
LENGTH_CHECK: not L['self'].single_transformer_blocks[13].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[13].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[13].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[13].act_mlp, 140581770196592) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_mlp, 140533118093056) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[13].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_mlp.lora_A, 140533119680864) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in 
forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_mlp.lora_A['default_0'], 140533119689888) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_mlp.lora_A['default_0'].weight, 140537320053376) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_mlp.lora_B, 140533119688256) # lora_B = self.lora_B[active_adapter] # 
peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_mlp.lora_B['default_0'], 140533119686288) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_mlp.base_layer, 140581770196544) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # 
peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_mlp.lora_dropout, 140533118103712) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_mlp.lora_dropout['default_0'], 140533118101936) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[13].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: 
len(L['self'].single_transformer_blocks[13].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[13].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[13].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[13].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[13].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[13].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[13].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[13].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[13].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_out, 140533119696416) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[13].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # 
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_out.lora_A, 140533119693536) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_out.lora_A['default_0'], 140533118059520) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_out.lora_A['default_0'].weight, 140537320060976) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_out.lora_B, 140533119691232) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_out.lora_B['default_0'], 140533118059088) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_out.base_layer, 140581770196640) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_out.lora_dropout, 140533119696224) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_out.lora_dropout['default_0'], 140533119696176) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
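
The same handful of guard kinds repeats throughout the tree. As a rough Python rendering of what each primitive checks (the real implementations are generated by torch/_dynamo/guards.py and its guard manager; these equivalents are illustrative only):

    # Illustrative equivalents of the guard primitives appearing in this dump.
    def id_match(obj, expected_id):           # ID_MATCH / ___check_obj_id
        return id(obj) == expected_id
    def type_match(obj, expected_type_id):    # TYPE_MATCH / ___check_type_id
        return id(type(obj)) == expected_type_id
    def equals_match(obj, expected):          # EQUALS_MATCH
        return obj == expected
    def dict_length(d, n):                    # DICT_LENGTH
        return len(d) == n
    def length_check_empty(seq):              # LENGTH_CHECK: not seq
        return not seq
    def not_dict_contains(d, key):            # DICT_CONTAINS: not ___dict_contains(key, d)
        return key not in d

ID_MATCH works even for values like `.training`, `use_dora['default_0']`, and `_disable_adapters` because `True` and `False` are CPython singletons; the two object ids recurring throughout this dump (…393408 and …393440) are simply those two booleans, which is how Dynamo specializes on train/eval state by identity alone.
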
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[13].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[13].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[13].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[13].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[13].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[13].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[13].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[13].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[13].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14], accessed_by=GetItemGuardAccessor(14)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14], 140581770196304) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[14].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn, 140581770197504) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[14].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
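
The doubled TENSOR_ALIASING lines record that every adapted layer exposes the very same `_active_adapter` object as `L['self'].transformer_blocks[0].norm1.linear`, so Dynamo only re-checks the identity relation for each access path instead of re-guarding the contents per layer. A minimal reconstruction of the relationship being re-checked (the `Adapted` module here is a hypothetical stand-in; PEFT arranges the sharing internally):

    import torch.nn as nn

    shared_active = ["default_0"]               # one list object shared by all layers
    class Adapted(nn.Module):                   # hypothetical stand-in for a peft layer
        def __init__(self):
            super().__init__()
            self._active_adapter = shared_active  # same object, not a copy

    a, b = Adapted(), Adapted()
    assert a._active_adapter is b._active_adapter  # the relation the aliasing guard re-checks
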
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_k, 140533117921760) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[14].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_k.lora_A, 140533117923968) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_k.lora_A['default_0'], 140533117924208) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_k.lora_A['default_0'].weight, 140537319710992) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_k.lora_B, 140533117919216) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_k.lora_B['default_0'], 140533117920176) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_k.base_layer, 140581770197648) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_k.lora_dropout, 140533117923056) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
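
The recurring `DICT_CONTAINS: not ___dict_contains('forward', …)` guards come from nn/modules/module.py:1556: `_call_impl` dispatches to an instance-level `forward` attribute when one has been assigned onto the object, so the compiled code is only valid while no such per-instance override exists. A small demonstration of the condition being guarded:

    import types
    import torch.nn as nn

    m = nn.Linear(4, 4)
    assert "forward" not in m.__dict__   # guard holds: the class's forward is used
    m.forward = types.MethodType(lambda self, x: x * 2, m)
    assert "forward" in m.__dict__       # instance override present -> guard would fail
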
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_k.lora_dropout['default_0'], 140533117924064) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[14].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[14].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[14].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[14].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[14].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[14].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[14].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[14].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[14].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_q, 140533118035488) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[14].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
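
Note the EQUALS_MATCH guards: the LoRA scale is burned into the compiled graph as the Python float 1.0. Mutating that value invalidates the cache entry and triggers a recompile on the next call; a sketch, with `layer` as hypothetical shorthand for any of the wrapped projections above:

    # 'layer' stands in for e.g. transformer.single_transformer_blocks[14].attn.to_q
    layer.scaling["default_0"]         # == 1.0 -> EQUALS_MATCH passes, compiled graph reused
    layer.scaling["default_0"] = 0.7   # next call: EQUALS_MATCH fails -> Dynamo recompiles

This is why running a compiled LoRA pipeline with a different effective adapter scale between calls costs a fresh compilation rather than a cheap parameter update.
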
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_q.lora_A, 140533118031744) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_q.lora_A['default_0'], 140533117915568) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_q.lora_A['default_0'].weight, 140537319873232) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_q.lora_B, 140533118036880) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_q.lora_B['default_0'], 140533117914272) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_q.base_layer, 140581770197744) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_q.lora_dropout, 140533118030976) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_q.lora_dropout['default_0'], 140533118027568) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[14].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[14].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[14].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[14].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[14].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
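
Each wrapped projection contributes a block of roughly two dozen guards like the ones above, and the pattern repeats for every to_q/to_k/to_v/proj_mlp/proj_out across all the transformer blocks, so the full tree amounts to thousands of checks executed on every entry into the compiled forward. If the adapter is static at inference time, one common way to shed most of this is to fold the LoRA deltas into the base weights before compiling; a sketch assuming a diffusers pipeline `pipe` with the adapter already loaded (exact API behavior depends on the diffusers version, so treat this as an assumption, not a verified recipe):

    # Fold W <- W + (B @ A) * scaling so the peft branches (and the guards on
    # their bookkeeping) leave the hot path; fuse_lora()/unload_lora_weights()
    # are diffusers loader APIs.
    pipe.fuse_lora()
    pipe.unload_lora_weights()   # drop the wrapper modules where supported
    pipe.transformer = torch.compile(pipe.transformer)
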
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[14].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[14].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[14].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[14].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_v, 140533117923872) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', 
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_v, 140533117923872) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[14].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_v.lora_A, 140533117915616) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_v.lora_A['default_0'], 140533117925216) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_v.lora_A['default_0'].weight, 140537319713152) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_v.lora_B, 140533117911728) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_v.lora_B['default_0'], 140533117925120) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_v.base_layer, 140581770197792) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_v.lora_dropout, 140533117923728) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_v.lora_dropout['default_0'], 140533117924448) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | [... scaling (EQUALS_MATCH == 1.0) / use_dora / hook-dict / merged_adapters / _disable_adapters / _active_adapter TENSOR_ALIASING guards: same pattern as attn.to_q above ...]
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.norm_k, 140581770197696) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[14].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[14].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.norm_k.weight, 140581772772272) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
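The norm_k guards reference the RMSNorm code at diffusers/src/diffusers/models/normalization.py:428-430. A sketch of that computation, assuming the usual float32 variance accumulation (not verified against the exact diffusers revision in this trace):

    import torch
    import torch.nn as nn

    class RMSNormSketch(nn.Module):
        def __init__(self, dim: int, eps: float = 1e-6):
            super().__init__()
            self.eps = eps                               # guarded: EQUALS_MATCH eps == 1e-06
            self.weight = nn.Parameter(torch.ones(dim))  # guarded: ID_MATCH on the weight object

        def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
            input_dtype = hidden_states.dtype
            variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)
            hidden_states = hidden_states * torch.rsqrt(variance + self.eps)  # normalization.py:428
            if self.weight is not None:                                       # normalization.py:430
                hidden_states = hidden_states.to(input_dtype) * self.weight
            return hidden_states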
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.norm_q, 140581770197552) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[14].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.norm_q.weight, 140581771722112) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | [... __dict__ / training / _parameters / hook-dict guards: same pattern as attn.norm_k above ...]
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[14].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[14].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.processor, 140581770197456) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.forward, accessed_by=FuncDefaultsGuardAccessor
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm, 140581770197120) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[14].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.norm, 140581770197264) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.silu, 140581770197168) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | [... norm.norm / norm.silu __dict__ and training guards: same pattern as above ...]
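The norm.* guards cite the AdaLayerNorm-style modulation at normalization.py:169-171 and its call site at transformer_flux.py:88. A sketch of that block; the three-way shift/scale/gate chunk is an assumption consistent with the `norm_hidden_states, gate = self.norm(hidden_states, emb=temb)` call site, not a verified copy of the diffusers class:

    import torch
    import torch.nn as nn

    class AdaNormSingleSketch(nn.Module):
        def __init__(self, dim: int):
            super().__init__()
            self.silu = nn.SiLU()
            self.linear = nn.Linear(dim, 3 * dim)  # the LoRA-wrapped Linear guarded below
            self.norm = nn.LayerNorm(dim, elementwise_affine=False, eps=1e-6)

        def forward(self, x: torch.Tensor, emb: torch.Tensor):
            emb = self.linear(self.silu(emb))                                 # normalization.py:169
            shift_msa, scale_msa, gate_msa = emb.chunk(3, dim=1)
            x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # normalization.py:171
            return x, gate_msa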
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.linear, 140533118036208) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.linear.lora_A, 140533118032272) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.linear.lora_A['default_0'], 140533118031120) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.linear.lora_A['default_0'].weight, 140537319873712) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.linear.lora_B, 140533118026608) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.linear.lora_B['default_0'], 140533118032320) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.linear.base_layer, 140581770197216) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.linear.lora_dropout, 140533118034480) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.linear.lora_dropout['default_0'], 140533118035344) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[14].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[14].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | [... __dict__ / training / use_dora / hook-dict / merged_adapters / _disable_adapters guards: same pattern as attn.to_q above ...]
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].act_mlp, 140581770197360) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
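Since every LoRA wrapper repeats the guard set above, one common way to shrink the tree (and the per-call guard-evaluation overhead) is to fold the adapters into the base weights before compiling; this only helps when the LoRA scale stays fixed. A sketch, assuming `pipe` is the already-loaded Flux pipeline behind this trace (API names taken from recent diffusers releases; check your version):

    import torch

    pipe.fuse_lora(lora_scale=1.0)   # bake lora_B @ lora_A * scaling into the base Linears
    pipe.unload_lora_weights()       # drop the PEFT wrappers whose attributes were being guarded
    pipe.transformer = torch.compile(pipe.transformer)

After fusing, attn.to_q/to_v, norm.linear and proj_mlp are plain nn.Linear modules again, so the lora_A/lora_B/scaling/use_dora guards disappear and only the usual module-identity and hook guards remain. To reproduce a dump like this one, recent PyTorch exposes the logging knob programmatically:

    torch._logging.set_logs(guards=True, recompiles=True)  # or run with TORCH_LOGS="guards,recompiles"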
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_mlp, 140533118032560) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[14].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_mlp.lora_A, 140533118033520) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_mlp.lora_A['default_0'], 140533118029008) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_mlp.lora_A['default_0'].weight, 140537319877232) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_mlp.lora_B, 140533118026896) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_mlp.lora_B['default_0'], 140533118025888) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_mlp.base_layer, 140581770197312) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_mlp.lora_dropout, 140533118032080) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_mlp.lora_dropout['default_0'], 140533118031936) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[14].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[14].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[14].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[14].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[14].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[14].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[14].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[14].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[14].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_out, 140533118027616) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[14].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_out.lora_A, 140533118034432) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_out.lora_A['default_0'], 140533118036736) # lora_A = self.lora_A[active_adapter] # 
peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_out.lora_A['default_0'].weight, 140537319879232) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_out.lora_B, 140533118028144) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_out.lora_B['default_0'], 140533118037792) # lora_B = 
self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_out.base_layer, 140581770197408) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_out.lora_dropout, 140533118029056) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_out.lora_dropout.training, 140591004393408) # 
dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_out.lora_dropout['default_0'], 140533118029536) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[14].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[14].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[14].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[14].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: 
len(L['self'].single_transformer_blocks[14].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[14].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[14].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].single_transformer_blocks[14].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[14].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15], accessed_by=GetItemGuardAccessor(15) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15], 140581770197072) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[15].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn, 140581770198272) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[15].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_k, 140533118375712) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[15].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_k.lora_A, 140533118376864) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_k.lora_A['default_0'], 140533118374032) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_k.lora_A['default_0'].weight, 140537319577280) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_k.lora_B, 140533118378112) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_k.lora_B['default_0'], 140533118373840) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_k.base_layer, 140581770198416) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_k.lora_dropout, 140533118380224) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_k.lora_dropout['default_0'], 140533118384688) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[15].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[15].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[15].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[15].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[15].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[15].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[15].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[15].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[15].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_q, 140533117959136) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[15].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[15].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_q.lora_A, 140533117946320) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_q.lora_A['default_0'], 140533118370480) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_q.lora_A['default_0'].weight, 140537319705072) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_q.lora_B, 140533117946224) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_q.lora_B['default_0'], 140533118370624) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.lora_B['default_0'].__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_q.base_layer, 140581770198512) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_q.lora_dropout, 140533117958224) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | 
| +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_q.lora_dropout['default_0'], 140533117947472) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[15].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[15].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[15].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[15].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[15].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 
in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[15].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[15].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].single_transformer_blocks[15].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[15].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_v, 140533128157152) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[15].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_v.lora_A, 140533128157344) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
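The guard clusters above for `to_k` and `to_q` (and `to_v`, which continues below) all trace one code path: the PEFT LoRA `Linear.forward` branch at peft/tuners/lora/layer.py:557-568 cited in each guard. As a reading aid, here is a minimal sketch of that branch, reconstructed from the source comments embedded in the guards; the function name and signature are assumptions, not the verbatim PEFT implementation:

```python
import torch
import torch.nn as nn

def lora_linear_forward(
    base_layer: nn.Linear,
    lora_A: dict, lora_B: dict, lora_dropout: dict,
    scaling: dict, use_dora: dict,
    active_adapters: list, x: torch.Tensor,
) -> torch.Tensor:
    # Condensed sketch of peft/tuners/lora/layer.py:557-568, reconstructed
    # from the guard comments above -- not the verbatim PEFT code.
    result = base_layer(x)                       # :557 -> ID_MATCH on base_layer
    for adapter in active_adapters:              # backed by the _active_adapter aliasing guards
        if adapter not in lora_A.keys():         # :560 -> ID_MATCH on the lora_A container
            continue
        A, B = lora_A[adapter], lora_B[adapter]  # :562-563 -> ID_MATCH per 'default_0' entry
        drop = lora_dropout[adapter]             # :564
        scale = scaling[adapter]                 # :565 -> EQUALS_MATCH: scaling['default_0'] == 1.0
        x = x.to(A.weight.dtype)                 # :566 -> ID_MATCH on A.weight
        if not use_dora[adapter]:                # :568 -> ID_MATCH: use_dora['default_0'] is False
            result = result + B(A(drop(x))) * scale
    return result
```

Note that `scaling['default_0']` is pinned by EQUALS_MATCH to 1.0, so running the same compiled graph with a different adapter scale would fail that guard and trigger a recompile.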
[__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_v.lora_A['default_0'], 140533128155856) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_v.lora_A['default_0'].weight, 140537319587440) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_v.lora_B, 140533128158976) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.lora_B.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_v.lora_B['default_0'], 140533128159552) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_v.base_layer, 140581770198560) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | 
+- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_v.lora_dropout, 140533128157728) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_v.lora_dropout['default_0'], 140533128162432) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[15].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[15].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.scaling['default_0'], 
accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[15].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[15].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[15].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[15].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[15].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[15].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[15].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.norm_k, 140581770198464) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[15].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_k.eps, 
accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[15].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.norm_k.weight, 140581772775472) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.norm_q, 140581770198320) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[15].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_q.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[15].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.norm_q.weight, 140581772744304) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[15].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:37:54.938000 
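Both `norm_k` and `norm_q` above are RMSNorm layers: their guards pin `eps` (EQUALS_MATCH against 1e-06) and the affine `weight` (ID_MATCH), matching diffusers/src/diffusers/models/normalization.py:428-430. A rough reconstruction of that step follows; only the two cited lines are grounded in the log, the float32 variance computation and the dtype cast are assumptions:

```python
import torch

def rms_norm(hidden_states: torch.Tensor, weight, eps: float = 1e-6) -> torch.Tensor:
    # Hedged sketch of diffusers normalization.py:428-430; only :428 and :430
    # are cited in the guards, the variance line and dtype cast are assumed.
    variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)  # assumed
    hidden_states = hidden_states * torch.rsqrt(variance + eps)  # :428, eps guarded == 1e-06
    if weight is not None:                                       # :430, weight guarded by ID_MATCH
        hidden_states = hidden_states.to(weight.dtype) * weight  # cast assumed
    return hidden_states
```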
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[15].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.processor, 140581770198224) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm, 140581770197888) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[15].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.norm, 140581770198032) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.silu, 140581770197936) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.linear, 140533117920656) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[15].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.linear.lora_A, 140533117921136) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in 
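The `norm` subtree here is the AdaLayerNormZeroSingle modulation path of the single transformer block (normalization.py:169-171, called from transformer_flux.py:88); its inner `linear` is itself LoRA-wrapped, which is why the nested PEFT guards continue below. A sketch of the modulation step, where the 3-way chunk into (shift, scale, gate) is an assumption about the "single" variant rather than a cited line:

```python
import torch
import torch.nn as nn

def ada_layer_norm_zero_single(x, emb, silu: nn.SiLU, linear: nn.Linear, norm: nn.LayerNorm):
    # Sketch of normalization.py:169-171 as cited in the guards; the chunk
    # into (shift, scale, gate) is assumed, the two arithmetic lines are cited.
    emb = linear(silu(emb))                               # :169, LoRA-wrapped linear
    shift_msa, scale_msa, gate_msa = emb.chunk(3, dim=1)  # assumed split
    x = norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # :171
    return x, gate_msa  # matches "norm_hidden_states, gate = self.norm(...)" at transformer_flux.py:88
```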
self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.linear.lora_A['default_0'], 140533117895680) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.linear.lora_A['default_0'].weight, 140537319713072) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.linear.lora_B, 140533117907056) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.linear.lora_B['default_0'], 140533117909552) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.linear.base_layer, 140581770197984) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.linear.lora_dropout, 140533117920320) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.linear.lora_dropout['default_0'], 140533117912736) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[15].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[15].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[15].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[15].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[15].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[15].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[15].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[15].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
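Editor's note: the `# file:line` comments attached to the guards above trace the PEFT LoRA dispatch that Dynamo inlined here. A minimal sketch of that control flow, reconstructed only from the quoted source lines (simplified; not the verbatim peft/tuners/lora/layer.py:557-568 implementation):

def lora_linear_forward(layer, x, *args, **kwargs):
    # peft/tuners/lora/layer.py:557 -- frozen base projection runs first
    result = layer.base_layer(x, *args, **kwargs)
    for active_adapter in layer.active_adapters:
        if active_adapter not in layer.lora_A.keys():   # layer.py:560
            continue
        lora_A = layer.lora_A[active_adapter]           # layer.py:562
        lora_B = layer.lora_B[active_adapter]           # layer.py:563
        dropout = layer.lora_dropout[active_adapter]    # layer.py:564
        scaling = layer.scaling[active_adapter]         # layer.py:565
        x = x.to(lora_A.weight.dtype)                   # layer.py:566
        if not layer.use_dora[active_adapter]:          # layer.py:568
            # plain LoRA path: result += B(A(dropout(x))) * scaling
            result = result + lora_B(lora_A(dropout(x))) * scaling
    return result

Every attribute this sketch reads (lora_A, lora_B, lora_dropout, scaling, use_dora, base_layer, merged_adapters, _active_adapter) appears above as its own GuardManager, which is why a single LoRA-wrapped Linear contributes a dozen-plus guards.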
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].act_mlp, 140581770198128) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_mlp, 140533117902208) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[15].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_mlp.lora_A, 140533117957648) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_mlp.lora_A['default_0'], 140533117944064) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_mlp.lora_A['default_0'].weight, 140537319709632) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_mlp.lora_B, 140533117945984) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_mlp.lora_B['default_0'], 140533117944544) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_mlp.base_layer, 140581770198080) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_mlp.lora_dropout, 140533117907248) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_mlp.lora_dropout['default_0'], 140533117896208) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[15].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[15].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[15].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[15].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[15].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[15].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[15].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[15].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
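Editor's note: the two object ids that recur in every `.training` ID_MATCH above (140591004393408 and 140591004393440) are the interpreter-wide bool singletons, so these guards pin each submodule's train/eval state, and the EQUALS_MATCH guards pin the LoRA scaling factor to 1.0; changing either would fail the guard and trigger a recompile. A toy standalone illustration (hypothetical example, not taken from this log) of how flipping the training flag invalidates a compiled entry:

import torch

m = torch.nn.Linear(4, 4)   # fresh modules start with training=True
f = torch.compile(m)
x = torch.randn(2, 4)

f(x)        # first call compiles and installs ID_MATCH(m.training, id(True))
f(x)        # guards pass, the cached compiled code is reused
m.eval()    # rebinds m.training to False
f(x)        # the ID_MATCH on m.training fails, so Dynamo recompiles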
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_out, 140533117956688) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[15].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_out.lora_A, 140533117956736) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_out.lora_A['default_0'], 140533117959856) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_out.lora_A['default_0'].weight, 140537319711632) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_out.lora_B, 140533117951120) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_out.lora_B['default_0'], 140533117956784) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_out.base_layer, 140581770198176) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_out.lora_dropout, 140533117954624) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_out.lora_dropout['default_0'], 140533117951264) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[15].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[15].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[15].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[15].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[15].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[15].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[15].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[15].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
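Editor's note: the guard subtree for single_transformer_blocks[15] ends here, and the log continues with an identically shaped subtree for block 16. Dynamo unrolls the `for index_block, block in enumerate(self.single_transformer_blocks)` loop quoted in the ID_MATCH comments, so the full per-module guard set is repeated for every element of self.single_transformer_blocks. A sketch of how dumps like this one are typically enabled and disabled (the surrounding script is hypothetical; the logging call itself is the documented torch._logging API, equivalent to running with TORCH_LOGS="guards"):

import torch

# Print the TREE_GUARD_MANAGER dump after each compilation, as seen above.
torch._logging.set_logs(guards=True)

# ... build the LoRA-loaded pipeline and invoke the compiled transformer here ...

torch._logging.set_logs(guards=False)  # turn the guard dump back off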
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16], accessed_by=GetItemGuardAccessor(16)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16], 140581770197840) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[16].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn, 140581770772544) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[16].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_k, 140533128161040) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[16].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_k.lora_A, 140533126276544) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_k.lora_A['default_0'], 140533126269536) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_k.lora_A['default_0'].weight, 140537319394336) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_k.lora_B, 140533126281248) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_k.lora_B['default_0'], 140533126277264) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_k.base_layer, 140581770772688) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_k.lora_dropout, 140533126278800) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_k.lora_dropout['default_0'], 140533126270352) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[16].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[16].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[16].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[16].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148]
[0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[16].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[16].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[16].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 
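
Reading the to_k subtree above as code: each ID_MATCH / DICT_LENGTH / EQUALS_MATCH guard corresponds to one attribute or dict read in PEFT's LoRA forward, at the peft/tuners/lora/layer.py line the guard cites. The following is a minimal sketch of that path, reconstructed only from those cited fragments; the class name, the r=16 rank, and the nn.Identity dropout are illustrative assumptions, not PEFT's actual implementation, and DoRA handling is elided.

    import torch
    import torch.nn as nn

    class LoraLinearSketch(nn.Module):
        """Hedged sketch of the guarded PEFT LoRA layer; not the real class."""
        def __init__(self, base_layer: nn.Linear, r: int = 16, scaling: float = 1.0):
            super().__init__()
            self.base_layer = base_layer
            # Dicts keyed by adapter name, hence the DICT_LENGTH == 1 and
            # GetItemGuardAccessor(default_0) guards above.
            self.lora_A = nn.ModuleDict({"default_0": nn.Linear(base_layer.in_features, r, bias=False)})
            self.lora_B = nn.ModuleDict({"default_0": nn.Linear(r, base_layer.out_features, bias=False)})
            self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})
            self.scaling = {"default_0": scaling}   # EQUALS_MATCH scaling['default_0'] == 1.0
            self.use_dora = {"default_0": False}    # ID_MATCH against the False singleton
            self._active_adapter = ["default_0"]    # one shared list, see TENSOR_ALIASING

        def forward(self, x: torch.Tensor) -> torch.Tensor:
            result = self.base_layer(x)                          # layer.py:557
            for active_adapter in self._active_adapter:
                if active_adapter not in self.lora_A.keys():     # layer.py:560
                    continue
                lora_A = self.lora_A[active_adapter]             # layer.py:562
                lora_B = self.lora_B[active_adapter]             # layer.py:563
                dropout = self.lora_dropout[active_adapter]      # layer.py:564
                scaling = self.scaling[active_adapter]           # layer.py:565
                x = x.to(lora_A.weight.dtype)                    # layer.py:566
                if not self.use_dora[active_adapter]:            # layer.py:568
                    result = result + lora_B(lora_A(dropout(x))) * scaling
            return result

Every branch in this sketch that reads a dict or a module attribute shows up above as its own GuardManager node, which is why a single unmerged LoRA layer contributes dozens of guards.
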
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_q, 140533126182848) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[16].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_q.lora_A, 140533126177376) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_q.lora_A['default_0'], 140533126236576) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_q.lora_A['default_0'].weight, 140537319573360) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_q.lora_B, 140533126240464) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_q.lora_B['default_0'], 140533126238256) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_q.base_layer, 140581770772784) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_q.lora_dropout, 140533126183808) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_q.lora_dropout['default_0'], 140533126183664) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[16].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[16].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[16].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[16].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[16].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[16].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[16].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[16].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
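
Note that scaling['default_0'] is guarded with EQUALS_MATCH against the literal 1.0: Dynamo specializes on the Python float, so changing the LoRA scale later fails the guard and forces a recompile. A self-contained toy reproduction of that behavior, assuming a plain float read in forward is specialized the same way as here (ToyLora is illustrative, not the Flux model):

    import torch
    import torch.nn as nn

    class ToyLora(nn.Module):
        """Toy stand-in with the same dict-of-floats shape PEFT guards on."""
        def __init__(self):
            super().__init__()
            self.base = nn.Linear(8, 8)
            self.scaling = {"default_0": 1.0}

        def forward(self, x):
            return self.base(x) * self.scaling["default_0"]

    m = ToyLora()
    compiled = torch.compile(m)
    x = torch.randn(2, 8)
    compiled(x)                    # compiles; installs EQUALS_MATCH scaling == 1.0
    m.scaling["default_0"] = 0.5   # invalidates the installed guard
    compiled(x)                    # second compile for the new constant

Running a script like this with TORCH_LOGS="guards,recompiles" should print a guard dump of this same shape plus the recompile reason.
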
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_v, 140533126277168) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[16].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_v.lora_A, 140533126276928) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_v.lora_A['default_0'], 140533126272224) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_v.lora_A['default_0'].weight, 140537319395536) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_v.lora_B, 140533126273856) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_v.lora_B['default_0'], 140533126275728) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_v.base_layer, 140581770772832) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_v.lora_dropout, 140533126279760) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_v.lora_dropout['default_0'], 140533126276256) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[16].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[16].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[16].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[16].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[16].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[16].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[16].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[16].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
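
The TENSOR_ALIASING entries closing each projection's subtree assert object identity rather than tensor values: every LoRA layer reaches the same _active_adapter object that transformer_blocks[0].norm1.linear holds, so the compiled code can treat all of those reads as one. A small plain-Python illustration of the invariant being guarded (not Dynamo internals):

    # Every layer aliases one shared list, which is what the `a is b` guard pins.
    active = ["default_0"]

    class Layer:
        def __init__(self, shared):
            self._active_adapter = shared

    layers = [Layer(active) for _ in range(3)]
    assert layers[0]._active_adapter is layers[2]._active_adapter  # the guarded fact
    active.append("other")                 # mutating one copy mutates every alias
    assert layers[1]._active_adapter == ["default_0", "other"]
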
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.norm_k, 140581770772736) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[16].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[16].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.norm_k.weight, 140581772777712) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
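
The norm_k guards pin eps == 1e-06 and the weight object of the diffusers RMSNorm used for QK normalization. A minimal sketch of the forward those guards protect, reconstructed from the normalization.py lines cited above (428 and 430); the mean-of-squares variance line is the standard RMSNorm detail assumed here, since the guards only quote the rsqrt and weight checks:

    import torch
    import torch.nn as nn

    class RMSNormSketch(nn.Module):
        """Hedged sketch of a diffusers-style RMSNorm; dtype handling elided."""
        def __init__(self, dim: int, eps: float = 1e-6, elementwise_affine: bool = True):
            super().__init__()
            self.eps = eps                               # EQUALS_MATCH eps == 1e-06
            self.weight = nn.Parameter(torch.ones(dim)) if elementwise_affine else None

        def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
            variance = hidden_states.float().pow(2).mean(-1, keepdim=True)
            hidden_states = hidden_states * torch.rsqrt(variance + self.eps)  # :428
            if self.weight is not None:                                       # :430
                hidden_states = hidden_states * self.weight
            return hidden_states
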
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.norm_q, 140581770772592) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[16].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[16].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.norm_q.weight, 140581772780512) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[16].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[16].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.processor, 140581770198992) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.forward, accessed_by=FuncDefaultsGuardAccessor
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
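
At the attn level the guards fix heads == 24, the processor's type and identity, and the forward default for encoder_hidden_states. The cited attention_processor.py lines outline the prologue of the single-block attention; a hedged reconstruction follows (the view/transpose layout is the usual multi-head reshape assumed here, and rotary embeddings plus the actual scaled-dot-product call are omitted):

    import torch

    def flux_single_attn_prologue(attn, hidden_states, encoder_hidden_states=None):
        # attn is assumed to expose to_q/to_k/to_v, heads, norm_q, norm_k,
        # matching the guarded module above.
        batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape  # :1713
        query = attn.to_q(hidden_states)        # :1716
        key = attn.to_k(hidden_states)          # :1717
        value = attn.to_v(hidden_states)        # :1718
        inner_dim = key.shape[-1]
        head_dim = inner_dim // attn.heads      # :1721, heads == 24 is guarded
        query = query.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2)
        key = key.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2)
        value = value.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2)
        if attn.norm_q is not None:             # :1727
            query = attn.norm_q(query)
        if attn.norm_k is not None:             # :1729
            key = attn.norm_k(key)
        return query, key, value
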
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[16].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[16].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.processor, 140581770198992) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.forward, accessed_by=FuncDefaultsGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
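The FuncDefaultsGuardAccessor entry just above deserves a note: the traced branch at attention_processor.py:1713 depends on whether encoder_hidden_states is None, and for these single blocks that value comes from the function's default argument, so Dynamo pins attn.forward.__defaults__[0] by object id. A minimal sketch of the situation, paraphrasing the line the guard comment quotes (the signature is trimmed for illustration):

    # Sketch: the traced graph specializes on encoder_hidden_states being None,
    # which here is supplied by the default. Changing the default would change
    # the branch, so Dynamo guards forward.__defaults__[0].
    def forward(hidden_states, encoder_hidden_states=None):
        # Line quoted in the guard comment (attention_processor.py:1713):
        batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape
        return batch_size

    # The ID_MATCH above is morally this check:
    assert forward.__defaults__[0] is None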
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm, accessed_by=DictGetItemGuardAccessor(norm)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm, 140581770198656) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[16].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.norm, 140581770198800) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.silu, 140581770198704) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.linear, 140533128156864) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[16].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.linear.lora_A, 140533128158112) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.linear.lora_A['default_0'], 140533128159648) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.linear.lora_A['default_0'].weight, 140537319580560) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.linear.lora_B, 140533128158304) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.linear.lora_B['default_0'], 140533128151584) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.linear.base_layer, 140581770198752) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.linear.lora_dropout, 140533128155280) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.linear.lora_dropout['default_0'], 140533128156240) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[16].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[16].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[16].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[16].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[16].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[16].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[16].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[16].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[16].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
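Every norm.linear guard in this stretch corresponds to an attribute read in peft's LoRA Linear.forward; the guard comments quote peft/tuners/lora/layer.py:557-568 directly. A simplified reconstruction of that path, assembled from the quoted lines (the residual-update line at the end is inferred from how LoRA composes and is not quoted in this log), shows why base_layer, lora_A/lora_B['default_0'], lora_dropout, scaling (pinned to 1.0 here), and use_dora (pinned to False here) each get a guard, and why merged_adapters must stay empty and _disable_adapters stay False for this graph to remain valid:

    # Simplified sketch of the peft LoRA forward the guard comments cite
    # (peft/tuners/lora/layer.py:557-568). Attribute names match the guards;
    # the final "result = result + ..." line is inferred, not quoted above.
    def lora_linear_forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)        # layer.py:557
        for active_adapter in self.active_adapters:         # backed by the shared _active_adapter list
            if active_adapter not in self.lora_A.keys():    # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]            # layer.py:562
            lora_B = self.lora_B[active_adapter]            # layer.py:563
            dropout = self.lora_dropout[active_adapter]     # layer.py:564
            scaling = self.scaling[active_adapter]          # layer.py:565 (guarded == 1.0)
            x = x.to(lora_A.weight.dtype)                   # layer.py:566
            if not self.use_dora[active_adapter]:           # layer.py:568 (guarded False)
                result = result + lora_B(lora_A(dropout(x))) * scaling  # inferred
        return result

The doubled TENSOR_ALIASING entries record that _active_adapter is one shared object across the adapted layers (here aliased to transformer_blocks[0].norm1.linear._active_adapter), so Dynamo asserts the aliasing relationship at each site instead of re-guarding the value.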
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].act_mlp, 140581770198896) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_mlp, 140533128165696) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[16].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_mlp.lora_A, 140533128162528) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_mlp.lora_A['default_0'], 140533128153840) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_mlp.lora_A['default_0'].weight, 140537319586080) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_mlp.lora_B, 140533128165888) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_mlp.lora_B['default_0'], 140533128166512) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_mlp.base_layer, 140581770198848) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_mlp.lora_dropout, 140533128160560) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_mlp.lora_dropout['default_0'], 140533128161808) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[16].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[16].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[16].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[16].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[16].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[16].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[16].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[16].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[16].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
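Taken together, the norm, proj_mlp, act_mlp, attn, and (below) proj_out guards for block [16] trace out the single-block forward whose lines the comments quote (diffusers/src/diffusers/models/transformers/transformer_flux.py:88-98). A sketch assembled from those quoted lines follows; the residual bookkeeping, the torch.cat joining attn_output with mlp_hidden_states, and the gate unsqueeze are assumptions filling the gap between the quoted lines 91 and 98, not text from this log:

    # Sketch of the single-block forward reconstructed from the guard comments.
    import torch

    def single_block_forward(self, hidden_states, temb, image_rotary_emb=None):
        residual = hidden_states                                             # assumed
        norm_hidden_states, gate = self.norm(hidden_states, emb=temb)        # transformer_flux.py:88
        mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states))  # transformer_flux.py:89
        attn_output = self.attn(                                             # transformer_flux.py:91
            hidden_states=norm_hidden_states,
            image_rotary_emb=image_rotary_emb,
        )
        hidden_states = torch.cat([attn_output, mlp_hidden_states], dim=2)   # assumed
        hidden_states = gate.unsqueeze(1) * self.proj_out(hidden_states)     # transformer_flux.py:98 (unsqueeze assumed)
        return residual + hidden_states                                      # assumed

The proj_out guards that follow repeat the same LoRA pattern seen for proj_mlp above, and the whole per-block subtree then starts over for single_transformer_blocks[17]: the loop at transformer_flux.py:509 (quoted in the [17] ID_MATCH below) is unrolled at trace time, so every block is pinned by object id.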
(self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_out.lora_A, 140533126172864) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_out.lora_A['default_0'], 140533126175024) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[16].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_out.lora_A['default_0'].weight, 140537319583520) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_out.lora_B, 140533126180688) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_out.lora_B['default_0'], 140533126181456) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_out.base_layer, 140581770198944) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_out.lora_dropout, 140533126174544) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_out.lora_dropout['default_0'], 140533126177520) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[16].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[16].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[16].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[16].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[16].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[16].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[16].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[16].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[16].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17], accessed_by=GetItemGuardAccessor(17)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17], 140581770198608) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[17].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn, 140581770773312) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[17].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_k, 140533126479248) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[17].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_k.lora_A, 140533126475408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_k.lora_A['default_0'], 140533127120400) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_k.lora_A['default_0'].weight, 140537321359456) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_k.lora_B, 140533126465424) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_k.lora_B['default_0'], 140533127125728) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_k.base_layer, 140581770773456) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_k.lora_dropout, 140533126477280) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_k.lora_dropout['default_0'], 140533126473440) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[17].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[17].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[17].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[17].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[17].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[17].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[17].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[17].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[17].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_q, 140533155263488) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[17].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_q.lora_A, 140533126415168) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_q.lora_A['default_0'], 140533126420016) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_q.lora_A['default_0'].weight, 140537319396416) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_q.lora_B, 140533126430672) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_q.lora_B['default_0'], 140533126415456) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_q.base_layer, 140581770773552) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_q.lora_dropout, 140533126420976) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_q.lora_dropout['default_0'], 140533126422560) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[17].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[17].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[17].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[17].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[17].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[17].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[17].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[17].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[17].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_v, 140533127001472) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[17].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_v.lora_A, 140533126990768) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_v.lora_A['default_0'], 140533127054480) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_v.lora_A['default_0'].weight, 140537321357616) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_v.lora_B, 140533126991584) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_v.lora_B['default_0'], 140533127061872) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_v.base_layer, 140581770773600) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_v.lora_dropout, 140533126997584) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_v.lora_dropout['default_0'], 140533126997296) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[17].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[17].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[17].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[17].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[17].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[17].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[17].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v._active_adapter,
accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[17].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[17].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.norm_k, 140581770773504) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[17].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[17].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.norm_k.weight, 140581772714416) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.norm_q, 140581770773360) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[17].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[17].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # 
diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.norm_q.weight, 140581765134464) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[17].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[17].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.processor, 140581770773264) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn._forward_hooks, 
accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm, 140581770772928) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[17].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # 
diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.norm, 140581770773072) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.silu, 140581770772976) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.linear, 140533126273952) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in 
forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[17].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.linear.lora_A, 140533126268576) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.linear.lora_A['default_0'], 140533126301008) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[17].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.linear.lora_A['default_0'].weight, 140537319393216) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.linear.lora_B, 140533126304992) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.linear.lora_B['default_0'], 140533125429232) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.linear.base_layer, 140581770773024) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.linear.lora_dropout, 140533126271744) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[17].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.linear.lora_dropout['default_0'], 140533126273664) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[17].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[17].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[17].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[17].norm.linear.use_dora, 140591004466944) # if not 
self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[17].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[17].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[17].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[17].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[17].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[17].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].act_mlp, 140581770773168) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[17].proj_mlp, 140533125419680) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[17].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].proj_mlp.lora_A, 140533125430816) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].proj_mlp.lora_A['default_0'], 140533125425200) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].proj_mlp.lora_A['default_0'].weight, 140537319401456) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].proj_mlp.lora_B, 140533125427648) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].proj_mlp.lora_B['default_0'], 140533125424048) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].proj_mlp.base_layer, 140581770773120) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].proj_mlp.lora_dropout, 140533125421120) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 
in forward
| +- GuardManager: L['self'].single_transformer_blocks[17].proj_mlp (continued)
| | +- lora_dropout['default_0']: ID_MATCH(140533125415168); .training: ID_MATCH(140591004393408)  # dropout = self.lora_dropout[active_adapter]  # peft/tuners/lora/layer.py:564 in forward
| | +- scaling: TYPE_MATCH(140591004466944), DICT_LENGTH == 1; scaling['default_0']: EQUALS_MATCH == 1.0  # scaling = self.scaling[active_adapter]  # peft/tuners/lora/layer.py:565 in forward
| | +- use_dora: TYPE_MATCH(140591004466944), DICT_LENGTH == 1; use_dora['default_0']: ID_MATCH(140591004393440)  # if not self.use_dora[active_adapter]:  # peft/tuners/lora/layer.py:568 in forward
| | +- _forward_hooks / _backward_hooks / _forward_pre_hooks / _backward_pre_hooks: GuardManager, no child guards
| | +- merged_adapters: TYPE_MATCH(140591004458752), LENGTH_CHECK: not merged_adapters  # return bool(self.merged_adapters)  # peft/tuners/tuners_utils.py:506 in merged
| | +- _disable_adapters: ID_MATCH(140591004393440)  # return self._disable_adapters  # peft/tuners/tuners_utils.py:511 in disable_adapters
| | +- _active_adapter: TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[17].proj_mlp._active_adapter  # return self._active_adapter  # peft/tuners/tuners_utils.py:516 in active_adapter
| +- GuardManager: L['self'].single_transformer_blocks[17].proj_out: ID_MATCH(140533125427696)  # hidden_states = gate * self.proj_out(hidden_states)  # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | +- DICT_CONTAINS: not ___dict_contains('forward', proj_out.__dict__); .training: ID_MATCH(140591004393408)  # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward)  # nn/modules/module.py:1556 in _call_impl
| | +- lora_A: ID_MATCH(140533125428464); lora_A['default_0']: ID_MATCH(140533126377296); lora_A['default_0'].weight: ID_MATCH(140537319400896)  # x = x.to(lora_A.weight.dtype)  # peft/tuners/lora/layer.py:566 in forward
| | +- lora_B: ID_MATCH(140533126378208); lora_B['default_0']: ID_MATCH(140533126381424)  # lora_B = self.lora_B[active_adapter]  # peft/tuners/lora/layer.py:563 in forward
| | +- base_layer: ID_MATCH(140581770773216); .training: ID_MATCH(140591004393440)  # result = self.base_layer(x, *args, **kwargs)  # peft/tuners/lora/layer.py:557 in forward
| | +- lora_dropout: ID_MATCH(140533125430336); lora_dropout['default_0']: ID_MATCH(140533125426016)  # dropout = self.lora_dropout[active_adapter]  # peft/tuners/lora/layer.py:564 in forward
| | +- scaling['default_0']: EQUALS_MATCH == 1.0; use_dora['default_0']: ID_MATCH(140591004393440); merged_adapters: LENGTH_CHECK: not merged_adapters; _disable_adapters: ID_MATCH(140591004393440); _active_adapter: TENSOR_ALIASING with L['self'].transformer_blocks[0].norm1.linear._active_adapter (same per-layer guard pattern as proj_mlp above)
| +- L['self'].single_transformer_blocks[17]._forward_hooks / _backward_hooks / _forward_pre_hooks / _backward_pre_hooks: GuardManager, no child guards
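Every per-layer subtree in this dump mirrors, guard for guard, the attribute reads in peft's LoRA `Linear.forward`. Below is a minimal sketch of that path, reconstructed from the source lines quoted in the guard comments above; it is illustrative only (the merged-adapter and DoRA branches are elided, and the helper name `lora_linear_forward` is hypothetical):

```python
import torch.nn as nn

def lora_linear_forward(layer: nn.Module, x, *args, **kwargs):
    # `layer` is assumed to carry the guarded peft attributes: base_layer,
    # active_adapters, lora_A / lora_B (ModuleDicts), lora_dropout,
    # and the plain dicts scaling and use_dora.
    result = layer.base_layer(x, *args, **kwargs)      # layer.py:557
    for active_adapter in layer.active_adapters:       # just 'default_0' here
        if active_adapter not in layer.lora_A.keys():  # layer.py:560
            continue
        lora_A = layer.lora_A[active_adapter]          # layer.py:562
        lora_B = layer.lora_B[active_adapter]          # layer.py:563
        dropout = layer.lora_dropout[active_adapter]   # layer.py:564
        scaling = layer.scaling[active_adapter]        # layer.py:565, guarded == 1.0
        x = x.to(lora_A.weight.dtype)                  # layer.py:566
        if not layer.use_dora[active_adapter]:         # layer.py:568
            result = result + lora_B(lora_A(dropout(x))) * scaling
    return result
```

Each dict lookup in that loop is why every LoRA-wrapped projection contributes its own ID_MATCH / TYPE_MATCH / DICT_LENGTH cluster to the tree. The dump continues with single_transformer_blocks[18]: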
| +- GuardManager: L['self'].single_transformer_blocks[18]: ID_MATCH(140581770772880); .training: ID_MATCH(140591004393440)  # for index_block, block in enumerate(self.single_transformer_blocks):  # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | +- DICT_CONTAINS: not ___dict_contains('forward', single_transformer_blocks[18].__dict__)  # nn/modules/module.py:1556 in _call_impl
| | +- attn: ID_MATCH(140581770774080); .training: ID_MATCH(140591004393440)  # attn_output = self.attn(  # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | +- to_k: ID_MATCH(140533126640688); .training: ID_MATCH(140591004393408)  # key = attn.to_k(hidden_states)  # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | +- lora_A: ID_MATCH(140533126633968); lora_A['default_0']: ID_MATCH(140533126629744); lora_A['default_0'].weight: ID_MATCH(140537321364656)
| | | | +- lora_B: ID_MATCH(140533126636368); lora_B['default_0']: ID_MATCH(140533126643136)
| | | | +- base_layer: ID_MATCH(140581770774224); lora_dropout: ID_MATCH(140533126631856); lora_dropout['default_0']: ID_MATCH(140533126637088)
| | | | +- scaling['default_0']: EQUALS_MATCH == 1.0; use_dora['default_0']: ID_MATCH(140591004393440); merged_adapters: LENGTH_CHECK: not merged_adapters; _disable_adapters: ID_MATCH(140591004393440); _active_adapter: TENSOR_ALIASING with L['self'].transformer_blocks[0].norm1.linear._active_adapter
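The guard kinds recurring through these subtrees reduce to simple Python predicates. Roughly, as a behavioral sketch only (the real `___check_obj_id` / `___check_type_id` primitives run inside Dynamo's C++ guard manager):

```python
def id_match(obj, expected_id: int) -> bool:
    # ID_MATCH: the attribute is still the exact same Python object.
    return id(obj) == expected_id

def type_match(obj, expected_type_id: int) -> bool:
    # TYPE_MATCH: the value has exactly the expected type (no subclasses).
    return id(type(obj)) == expected_type_id

def equals_match(value, expected) -> bool:
    # EQUALS_MATCH: structural equality, e.g. scaling['default_0'] == 1.0.
    return value == expected

def dict_length(d: dict, n: int) -> bool:
    # DICT_LENGTH: e.g. len(layer.scaling) == 1, exactly one adapter loaded.
    return len(d) == n

def dict_not_contains(key: str, d: dict) -> bool:
    # DICT_CONTAINS (negated form): e.g. no per-instance 'forward' override.
    return key not in d

def length_check_empty(seq) -> bool:
    # LENGTH_CHECK 'not seq': e.g. merged_adapters must stay empty.
    return not seq

def aliasing(a, b) -> bool:
    # TENSOR_ALIASING: both sources name one object, e.g. every layer's
    # _active_adapter aliasing transformer_blocks[0].norm1.linear's.
    return a is b
```

All of these predicates are re-evaluated on every call of the compiled transformer; if any fails, Dynamo recompiles and emits a new guard set.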
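The `[0/1]` tag on every record marks this guard set as belonging to the first recompilation of frame 0 (the `[0/0]` compilation appears earlier in the log). A dump in this format can be requested through PyTorch's logging controls; a minimal sketch, with a stand-in `nn.Linear` where this log had the FLUX transformer:

```python
import torch
import torch.nn as nn

# Equivalent to running with TORCH_LOGS="guards,recompiles" in the environment.
torch._logging.set_logs(guards=True, recompiles=True)

model = nn.Linear(8, 8)        # stand-in for the LoRA-wrapped transformer here
compiled = torch.compile(model)
compiled(torch.randn(2, 8))    # first call compiles [0/0] and logs its guards
```

The dump then resumes with the remaining attention projections of block 18: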
| | | +- to_q: ID_MATCH(140533126634736); .training: ID_MATCH(140591004393408)  # query = attn.to_q(hidden_states)  # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | +- lora_A: ID_MATCH(140533126629024); lora_A['default_0']: ID_MATCH(140533126633680); lora_A['default_0'].weight: ID_MATCH(140537321365776)
| | | | +- lora_B: ID_MATCH(140533126629552); lora_B['default_0']: ID_MATCH(140533126635840)
| | | | +- base_layer: ID_MATCH(140581770774320); lora_dropout: ID_MATCH(140533126627776); lora_dropout['default_0']: ID_MATCH(140533126630224)
| | | | +- scaling['default_0']: EQUALS_MATCH == 1.0; use_dora['default_0']: ID_MATCH(140591004393440); merged_adapters: LENGTH_CHECK: not merged_adapters; _disable_adapters: ID_MATCH(140591004393440); _active_adapter: TENSOR_ALIASING with L['self'].transformer_blocks[0].norm1.linear._active_adapter
| | | +- to_v: ID_MATCH(140533126630656)  # value = attn.to_v(hidden_states)  # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | +- lora_A: ID_MATCH(140533126636560); lora_A['default_0']: ID_MATCH(140533126896256); lora_A['default_0'].weight: ID_MATCH(140537321235744)
| | | | +- lora_B: ID_MATCH(140533126631232)  # lora_B =
self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.to_v.lora_B['default_0'], 140533126894672) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.to_v.base_layer, 140581770774368) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[18].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.to_v.lora_dropout, 140533126638480) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.to_v.lora_dropout['default_0'], 140533126636944) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[18].attn.to_v.scaling, 140591004466944) # scaling = 
self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[18].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[18].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[18].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[18].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[18].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not 
L['self'].single_transformer_blocks[18].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[18].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[18].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.norm_k, 140581770774272) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[18].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_k.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[18].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.norm_k.weight, 140581783349232) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.norm_q, 140581770774128) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_q.__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[18].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[18].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.norm_q.weight, 140581772716976) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[18].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[18].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[18].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.processor, 140581770774032) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm, 
accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm, 140581770773696) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[18].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.norm, 140581770773840) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[18].norm.silu, 140581770773744) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.linear, 140533155253024) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[18].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.linear.lora_A, 140533125489392) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] 
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.linear.lora_A['default_0'], 140533125482336) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.linear.lora_A['default_0'].weight, 140537321372496) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.linear.lora_B, 140533125491936) # lora_B = 
self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.linear.lora_B['default_0'], 140533125481088) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.linear.base_layer, 140581770773792) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[18].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.linear.lora_dropout, 140533125487712) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.linear.lora_dropout['default_0'], 140533125483536) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[18].norm.linear.scaling, 
140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[18].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[18].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[18].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[18].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[18].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- 
LENGTH_CHECK: not L['self'].single_transformer_blocks[18].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[18].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[18].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[18].act_mlp, 140581770773936) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_mlp, 140533125485264) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[18].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_mlp.lora_A, 140533125490064) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in 
forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_mlp.lora_A['default_0'], 140533126827936) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_mlp.lora_A['default_0'].weight, 140537321364336) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_mlp.lora_B, 140533125491168) # lora_B = self.lora_B[active_adapter] # 
peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_mlp.lora_B['default_0'], 140533126826256) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_mlp.base_layer, 140581770773888) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # 
peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_mlp.lora_dropout, 140533125494432) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_mlp.lora_dropout['default_0'], 140533125494576) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[18].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: 
len(L['self'].single_transformer_blocks[18].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[18].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[18].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[18].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[18].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[18].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[18].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[18].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[18].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_out, 140533126826928) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[18].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # 
diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_out.lora_A, 140533126642848) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_out.lora_A['default_0'], 140533126627584) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.lora_A['default_0'].weight, 
accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_out.lora_A['default_0'].weight, 140537321368896) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_out.lora_B, 140533126628352) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_out.lora_B['default_0'], 140533126629984) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_out.base_layer, 140581770773984) # result = 
self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_out.lora_dropout, 140533126827360) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_out.lora_dropout['default_0'], 140533126825296) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[18].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[18].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[18].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[18].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[18].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[18].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[18].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[18].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[18].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[18].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[18].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[18]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19], accessed_by=GetItemGuardAccessor(19) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19], 140581770773648) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[19].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn, 140581770774848) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[19].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.training, 140591004393440) # attn_output = self.attn( # 
diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_k, 140533126681056) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[19].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_k.lora_A, 140533126691040) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in 
self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_k.lora_A['default_0'], 140533127764416) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_k.lora_A['default_0'].weight, 140537321238944) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_k.lora_B, 140533126686480) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_k.lora_B['default_0'], 140533127761776) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_k.base_layer, 140581770774992) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_k.lora_dropout, 140533126679040) # dropout = self.lora_dropout[active_adapter] # 
peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_k.lora_dropout['default_0'], 140533126688880) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[19].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[19].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | 
+- EQUALS_MATCH: L['self'].single_transformer_blocks[19].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[19].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[19].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[19].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[19].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[19].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[19].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_q, 140533126781472) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[19].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_q.lora_A, 140533126681392) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_q.lora_A['default_0'], 140533126677552) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_q.lora_A['default_0'].weight, 140537321241744) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_q.lora_B, 140533126678368) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_q.lora_B['default_0'], 140533126677456) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_q.base_layer, 140581770775088) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_q.lora_dropout, 140533126781568) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_q.lora_dropout['default_0'], 140533126791024) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[19].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[19].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[19].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[19].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[19].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[19].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[19].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[19].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[19].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_v, 140533127764944) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[19].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_v.lora_A, 140533127772816) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_v.lora_A['default_0'], 140533127770320) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_v.lora_A['default_0'].weight, 140537321227184) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_v.lora_B, 140533127766480) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_v.lora_B['default_0'], 140533127768448) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_v.base_layer, 140581770775136) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_v.lora_dropout, 140533127771712) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_v.lora_dropout['default_0'], 140533127767056) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[19].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[19].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[19].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[19].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[19].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[19].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[19].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[19].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[19].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.norm_k, 140581770775040) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[19].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[19].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.norm_k.weight, 140581772719056) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.norm_q, 140581770774896) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[19].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[19].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.norm_q.weight, 140581765885248) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[19].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[19].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.processor, 140581770774800) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.forward, accessed_by=FuncDefaultsGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm, accessed_by=DictGetItemGuardAccessor(norm)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm, 140581770774464) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[19].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.norm, 140581770774608) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.silu, 140581770774512) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.linear, 140533126902496) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[19].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.linear.lora_A, 140533125626416) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.linear.lora_A['default_0'], 140533125620560) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.linear.lora_A['default_0'].weight, 140537321241904) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.linear.lora_B, 140533125612112) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.linear.lora_B['default_0'], 140533125618400) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.linear.base_layer, 140581770774560) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.linear.lora_dropout, 140533126892848) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.linear.lora_dropout['default_0'], 140533126900480) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[19].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[19].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[19].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[19].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[19].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | |
| | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[19].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[19].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[19].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].single_transformer_blocks[19].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].act_mlp, 140581770774704) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_mlp, 140533125612928) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[19].proj_mlp.__dict__) # 
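
NOTE: The sub-tree above is the complete guard set Dynamo installs for one PEFT-wrapped Linear (single_transformer_blocks[19].norm.linear): ID_MATCH on the module and its lora_A/lora_B/lora_dropout ModuleDicts, TYPE_MATCH/DICT_LENGTH/EQUALS_MATCH on the plain-dict attributes scaling and use_dora, an emptiness LENGTH_CHECK on merged_adapters, and TENSOR_ALIASING checks asserting every layer's _active_adapter is the same object as transformer_blocks[0].norm1.linear._active_adapter. These map one-to-one onto the attribute reads in peft/tuners/lora/layer.py:557-568 quoted in the trailing comments. A minimal self-contained sketch of that forward path, reconstructed from those quoted lines (the class name is hypothetical, and the final `result + lora_B(lora_A(dropout(x))) * scaling` update is assumed from PEFT's published implementation, since the log never quotes that line):

import torch
from torch import nn

class LoraLinearSketch(nn.Module):
    # Stand-in for peft's LoRA Linear wrapper; attribute names match the guards above.
    def __init__(self, base: nn.Linear, r: int = 16):
        super().__init__()
        self.base_layer = base                                               # ID_MATCH target
        self.lora_A = nn.ModuleDict({"default_0": nn.Linear(base.in_features, r, bias=False)})
        self.lora_B = nn.ModuleDict({"default_0": nn.Linear(r, base.out_features, bias=False)})
        self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})
        self.scaling = {"default_0": 1.0}        # plain dict -> TYPE_MATCH/DICT_LENGTH/EQUALS_MATCH
        self.use_dora = {"default_0": False}     # plain dict -> TYPE_MATCH/DICT_LENGTH/ID_MATCH
        self.merged_adapters = []                # empty list -> TYPE_MATCH/LENGTH_CHECK
        self._disable_adapters = False           # bool -> ID_MATCH
        self._active_adapter = ["default_0"]     # one shared list -> TENSOR_ALIASING across layers

    def forward(self, x):
        result = self.base_layer(x)                              # layer.py:557
        for active_adapter in self._active_adapter:
            if active_adapter not in self.lora_A.keys():         # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]                 # layer.py:562
            lora_B = self.lora_B[active_adapter]                 # layer.py:563
            dropout = self.lora_dropout[active_adapter]          # layer.py:564
            scaling = self.scaling[active_adapter]               # layer.py:565
            x = x.to(lora_A.weight.dtype)                        # layer.py:566
            if not self.use_dora[active_adapter]:                # layer.py:568
                # assumed from PEFT's non-DoRA branch, not quoted in this log
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

lora = LoraLinearSketch(nn.Linear(64, 64))
y = lora(torch.randn(2, 64))

Every attribute read in this sketch is re-checked by one of the guards above on each cache lookup, which is why a single LoRA Linear contributes a few dozen guards.
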
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].act_mlp, 140581770774704) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_mlp, 140533125612928) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[19].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_mlp.lora_A, 140533125621616) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_mlp.lora_A['default_0'], 140533126726560) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_mlp.lora_A['default_0'].weight, 140537321228064) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_mlp.lora_B, 140533125613264) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_mlp.lora_B['default_0'], 140533126729536) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_mlp.base_layer, 140581770774656) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_mlp.lora_dropout, 140533125612976) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_mlp.lora_dropout['default_0'], 140533125627760) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[19].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[19].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[19].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[19].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[19].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[19].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[19].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[19].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[19].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
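
NOTE: Two guard kinds in the proj_mlp sub-tree above are worth calling out. EQUALS_MATCH pins scaling['default_0'] to the exact value 1.0, so calling the same compiled model with a different effective adapter scale fails this guard and forces a recompile; and the duplicated TENSOR_ALIASING checks assert that this layer's _active_adapter is the very same object as transformer_blocks[0].norm1.linear._active_adapter. Because this whole sub-tree is re-validated on every compiled call, a common way to eliminate it is to fold the adapter into the base weights before compiling. A hedged sketch using diffusers' LoRA-mixin methods (method names per recent diffusers releases and may differ across versions; assumes `pipe` is a diffusers pipeline with a LoRA already loaded via pipe.load_lora_weights(...)):

import torch

# After fusing, Dynamo sees plain nn.Linear modules, so none of the
# lora_A/lora_B/scaling/use_dora guards above are installed.
pipe.fuse_lora(lora_scale=1.0)   # bake W <- W + scale * (B @ A) into the base layers
pipe.unload_lora_weights()       # drop the PEFT wrapper modules
pipe.transformer = torch.compile(pipe.transformer)

The trade-off is that a fused adapter can no longer be toggled or rescaled at runtime without unfusing and recompiling.
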
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_out, 140533126736400) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[19].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_out.lora_A, 140533126741536) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_out.lora_A['default_0'], 140533126789632) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_out.lora_A['default_0'].weight, 140537321230784) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_out.lora_B, 140533126736448) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_out.lora_B['default_0'], 140533126785264) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_out.base_layer, 140581770774752) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_out.lora_dropout, 140533126731072) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_out.lora_dropout['default_0'], 140533126734144) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[19].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[19].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[19].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[19].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[19].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[19].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[19].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[19].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[19].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
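
NOTE: Here the tree finishes single_transformer_blocks[19] and starts single_transformer_blocks[20], repeating the same per-module pattern (attn.to_k/to_q/to_v, proj_mlp, proj_out, norm.linear), so the total guard count grows roughly linearly with the number of blocks times the number of LoRA-wrapped linears per block. To reproduce a dump like this one, or to see which guard failure triggered a recompile, the PyTorch 2.x logging switches below can be used (a sketch; spellings per torch._logging in recent 2.x releases):

import torch

# Equivalent to running with TORCH_LOGS="guards,recompiles" in the environment.
torch._logging.set_logs(guards=True, recompiles=True)
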
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20], accessed_by=GetItemGuardAccessor(20)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20], 140581770774416) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[20].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn, accessed_by=DictGetItemGuardAccessor(attn)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn, 140581770775616) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[20].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_k, 140533171996368) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[20].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_k.lora_A, 140533172003712) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_k.lora_A['default_0'], 140533211641888) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_k.lora_A['default_0'].weight, 140537321106752) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_k.lora_B, 140533172214544) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_k.lora_B['default_0'], 140533212588512) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_k.base_layer, 140581770775760) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_k.lora_dropout, 140533171995504) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_k.lora_dropout['default_0'], 140533171996080) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[20].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[20].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[20].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[20].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[20].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[20].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[20].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[20].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[20].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_q, 140533170507632) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[20].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[20].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_q.lora_A, 140533170222800) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_q.lora_A['default_0'], 140533172083520) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_q.lora_A['default_0'].weight, 140537321102832) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_q.lora_B, 140533173123152) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_q.lora_B['default_0'], 140533171998960) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.lora_B['default_0'].__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_q.base_layer, 140581770775856) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_q.lora_dropout, 140533170508160) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | 
| +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_q.lora_dropout['default_0'], 140533170508016) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[20].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[20].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[20].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[20].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[20].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 
in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[20].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[20].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].single_transformer_blocks[20].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[20].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_v, 140533170772848) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[20].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_v.lora_A, 140533211503328) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_v.lora_A['default_0'], 140533212355680) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_v.lora_A['default_0'].weight, 140537321102032) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_v.lora_B, 140533212445088) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.lora_B.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_v.lora_B['default_0'], 140533212354816) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_v.base_layer, 140581770775904) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | 
+- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_v.lora_dropout, 140533211512784) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_v.lora_dropout['default_0'], 140533211506880) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[20].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[20].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.scaling['default_0'], 
accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[20].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[20].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[20].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[20].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[20].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[20].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[20].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.norm_k, 140581770775808) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[20].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_k.eps, 
accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[20].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.norm_k.weight, 140581773351712) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.norm_q, 140581770775664) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[20].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_q.training, 
accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[20].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.norm_q.weight, 140581772774352) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[20].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[20].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.processor, 140581770775568) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.forward, accessed_by=FuncDefaultsGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm, accessed_by=DictGetItemGuardAccessor(norm)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm, 140581770775232) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
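
Reading the guards so far: Dynamo has pinned block 20's query RMSNorm (eps == 1e-06, an optional learnable weight) and the head count (attn.heads == 24, used as head_dim = inner_dim // attn.heads per the quoted attention_processor.py:1721). A minimal sketch of the RMSNorm forward those quoted normalization.py lines belong to; only the two commented lines are quoted by the guards, the float32 variance reduction and dtype handling are assumptions:

    import torch

    def rms_norm(hidden_states: torch.Tensor, weight, eps: float = 1e-6) -> torch.Tensor:
        # assumed: mean of squares over the channel (last) dimension
        variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + eps)  # normalization.py:428 (quoted above)
        if weight is not None:                                       # normalization.py:430 (quoted above)
            hidden_states = hidden_states.to(weight.dtype) * weight
        return hidden_states
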
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[20].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.norm, 140581770775376) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.silu, 140581770775280) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.linear, 140533130610240) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[20].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.linear.lora_A, 140533169289488) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
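
The norm module guarded above is the block's adaptive layer norm: a SiLU plus Linear map the conditioning embedding to modulation parameters, and a parameter-free LayerNorm output is shifted and scaled (normalization.py:169/171, both quoted). The guards also show that this `linear` is itself a PEFT LoRA-wrapped layer, which is why `lora_A`/`lora_B` guards follow. A hedged sketch; the three-way chunk and the returned gate are assumptions inferred from the block's `norm_hidden_states, gate = self.norm(hidden_states, emb=temb)` unpacking, while the two commented lines are quoted by the guards:

    import torch
    import torch.nn as nn

    class AdaLayerNormZeroSingleSketch(nn.Module):
        # hypothetical reconstruction for illustration; dims and eps are assumptions
        def __init__(self, embedding_dim: int):
            super().__init__()
            self.silu = nn.SiLU()
            self.linear = nn.Linear(embedding_dim, 3 * embedding_dim)
            self.norm = nn.LayerNorm(embedding_dim, elementwise_affine=False, eps=1e-6)

        def forward(self, x: torch.Tensor, emb: torch.Tensor):
            emb = self.linear(self.silu(emb))                     # normalization.py:169 (quoted above)
            shift_msa, scale_msa, gate_msa = emb.chunk(3, dim=1)  # assumed split
            x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # normalization.py:171 (quoted above)
            return x, gate_msa
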
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.linear.lora_A['default_0'], 140533169282624) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.linear.lora_A['default_0'].weight, 140537321110432) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.linear.lora_B, 140533169283200) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.linear.lora_B['default_0'], 140533173221840) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.linear.base_layer, 140581770775328) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.linear.lora_dropout, 140533129201648) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.linear.lora_dropout['default_0'], 140533129202560) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[20].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[20].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[20].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[20].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[20].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[20].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[20].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
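
Taken together, the peft/tuners/lora/layer.py lines quoted across this stretch (557-568) are the LoRA Linear forward. Reassembled in source order it is roughly the sketch below; the adapter loop and the final update are assumptions about the surrounding control flow, and only the lines carrying a file:line comment are quoted verbatim by the guards. The value guards matter for recompiles: scaling['default_0'] == 1.0 is an EQUALS_MATCH, use_dora['default_0'] is identity-pinned, merged_adapters must stay empty and _disable_adapters unset, so merging the adapter, enabling DoRA, or changing the LoRA scale invalidates this compiled graph.

    # Hedged reconstruction of the guarded LoRA forward path; comments mark
    # which lines the guards quote and which parts are assumed.
    def lora_linear_forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)        # peft/tuners/lora/layer.py:557
        for active_adapter in self.active_adapters:         # assumed loop
            if active_adapter not in self.lora_A.keys():    # peft/tuners/lora/layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]            # peft/tuners/lora/layer.py:562
            lora_B = self.lora_B[active_adapter]            # peft/tuners/lora/layer.py:563
            dropout = self.lora_dropout[active_adapter]     # peft/tuners/lora/layer.py:564
            scaling = self.scaling[active_adapter]          # peft/tuners/lora/layer.py:565
            x = x.to(lora_A.weight.dtype)                   # peft/tuners/lora/layer.py:566
            if not self.use_dora[active_adapter]:           # peft/tuners/lora/layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling  # assumed update
        return result
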
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[20].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[20].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].act_mlp, 140581770775472) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_mlp, 140533127067488) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[20].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_mlp.lora_A, 140533170878352) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_mlp.lora_A['default_0'], 140533170773808) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_mlp.lora_A['default_0'].weight, 140537321097552) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_mlp.lora_B, 140533170775200) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_mlp.lora_B['default_0'], 140533170773904) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_mlp.base_layer, 140581770775424) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_mlp.lora_dropout, 140533171023600) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_mlp.lora_dropout['default_0'], 140533171019760) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[20].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[20].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[20].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[20].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[20].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[20].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[20].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[20].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[20].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_out, 140533171453376) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[20].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_out.lora_A, 140533170611360) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
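
proj_out is the last guarded submodule of block 20, and the three quoted transformer_flux.py lines (88, 89, 98) bracket the whole single-block forward. A sketch under the assumption of the usual parallel attention + MLP layout; the attention call, the concatenation, and the residual between the quoted lines are assumptions:

    import torch

    # Hedged sketch; only lines with transformer_flux.py comments are quoted by the guards.
    def single_block_forward(self, hidden_states, temb, image_rotary_emb=None):
        residual = hidden_states                                             # assumed
        norm_hidden_states, gate = self.norm(hidden_states, emb=temb)        # transformer_flux.py:88
        mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states))  # transformer_flux.py:89
        attn_output = self.attn(hidden_states=norm_hidden_states,           # assumed call
                                image_rotary_emb=image_rotary_emb)
        hidden_states = torch.cat([attn_output, mlp_hidden_states], dim=2)   # assumed concat
        hidden_states = gate * self.proj_out(hidden_states)                  # transformer_flux.py:98 (gate broadcast assumed)
        return residual + hidden_states                                      # assumed residual
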
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_out.lora_A['default_0'], 140533170514304) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_out.lora_A['default_0'].weight, 140537321103232) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_out.lora_B, 140533170602768) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_out.lora_B['default_0'], 140533170507104) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_out.base_layer, 140581770775520) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_out.lora_dropout, 140533170612944) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_out.lora_dropout['default_0'], 140533217229072) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[20].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[20].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
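
For reference, the guard kinds recurring throughout this dump can be read as plain-Python predicates over the frame locals L. The real checks are compiled GuardManager nodes, so the sketch below is only an approximation, and the integer constants are CPython object ids specific to this process; the [0/1] tag indicates these guards belong to a recompiled (second) graph of frame 0:

    # Approximate meaning of the guard kinds, using block 20's proj_out as the example.
    def proj_out_guards_still_hold(L) -> bool:
        m = L['self'].single_transformer_blocks[20].proj_out
        return (
            id(m.lora_dropout['default_0']) == 140533217229072  # ID_MATCH: pins object identity
            and id(type(m.scaling)) == 140591004466944          # TYPE_MATCH: pins the exact type (here dict)
            and len(m.scaling) == 1                             # DICT_LENGTH: dict size must not change
            and not m.merged_adapters                           # LENGTH_CHECK: list must stay empty
            and 'forward' not in m.__dict__                     # DICT_CONTAINS: no per-instance forward override
        )

TENSOR_ALIASING is the remaining kind: an `a is b` check that the `_active_adapter` attribute is literally the same object shared across every LoRA layer, so swapping the active adapter on one layer but not another would also be caught.
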
len(L['self'].single_transformer_blocks[20].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[20].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[20].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[20].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[20].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[20].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[20].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[20].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[20].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21], accessed_by=GetItemGuardAccessor(21) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21], 140581770775184) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[21].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn, 140581770776384) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[21].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_k, 140533214350640) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ 
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[21].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_k.lora_A, 140533214352416) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_k.lora_A['default_0'], 140533214280096) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[21].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_k.lora_A['default_0'].weight, 140537320858672) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_k.lora_B, 140533216276544) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_k.lora_B['default_0'], 140533214270544) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_k.base_layer, 140581770776528) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_k.lora_dropout, 140533214362352) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_k.lora_dropout.training, 140591004393408) # 
dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_k.lora_dropout['default_0'], 140533214362208) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[21].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[21].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[21].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[21].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] 
[0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[21].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[21].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[21].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[21].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[21].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_q, 140533213199712) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[21].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_q.lora_A, 140533214787232) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[21].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_q.lora_A['default_0'], 140533214349344) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_q.lora_A['default_0'].weight, 140537320863552) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_q.lora_B, 140533214560208) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_q.lora_B['default_0'], 140533214355200) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_q.base_layer, 140581770776624) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[21].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_q.lora_dropout, 140533214789392) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_q.lora_dropout['default_0'], 140533213195920) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[21].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[21].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # 
peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[21].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[21].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[21].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[21].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[21].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q._disable_adapters, 
accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[21].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[21].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_v, 140533214281632) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[21].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_v.lora_A, 140533216516064) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_v.lora_A['default_0'], 140533216350640) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.lora_A['default_0'].weight, 
accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_v.lora_A['default_0'].weight, 140537320855552) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_v.lora_B, 140533216518800) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_v.lora_B['default_0'], 140533218311616) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_v.lora_dropout, 140533214272224) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_v.lora_dropout['default_0'], 140533214276160) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[21].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[21].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[21].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[21].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[21].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
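Taken together, the guards on lora_A, lora_B, lora_dropout, base_layer, scaling and use_dora pin every attribute read in the PEFT Linear forward that the guard comments quote (peft/tuners/lora/layer.py:557-568). A condensed paraphrase of that path, with the final update line filled in as the usual LoRA formula (an assumption; the trace only quotes lines 557-568):

    # Paraphrased from the source lines quoted in the guard comments; the
    # surrounding loop and the last update line are assumptions.
    result = self.base_layer(x, *args, **kwargs)        # layer.py:557
    for active_adapter in self.active_adapters:
        if active_adapter not in self.lora_A.keys():    # layer.py:560
            continue
        lora_A = self.lora_A[active_adapter]            # layer.py:562
        lora_B = self.lora_B[active_adapter]            # layer.py:563
        dropout = self.lora_dropout[active_adapter]     # layer.py:564
        scaling = self.scaling[active_adapter]          # layer.py:565 (== 1.0 here)
        x = x.to(lora_A.weight.dtype)                   # layer.py:566
        if not self.use_dora[active_adapter]:           # layer.py:568
            # Standard LoRA update (assumed, not quoted in the trace):
            result = result + lora_B(lora_A(dropout(x))) * scaling

Because the EQUALS_MATCH pins scaling['default_0'] == 1.0 and the DICT_LENGTH pins a single adapter, changing the LoRA scale or loading a second adapter would fail these guards and trigger recompilation.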
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[21].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[21].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[21].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[21].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.norm_k, 140581770776576) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
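The merged_adapters, _disable_adapters and _active_adapter guards correspond to the three small properties quoted from peft/tuners/tuners_utils.py. Reconstructed from the quoted lines (the @property decorators and signatures are assumptions):

    @property
    def merged(self) -> bool:
        return bool(self.merged_adapters)   # tuners_utils.py:506; guarded via LENGTH_CHECK (empty list)

    @property
    def disable_adapters(self) -> bool:
        return self._disable_adapters       # tuners_utils.py:511; guarded via ID_MATCH on a bool singleton

    @property
    def active_adapter(self):
        return self._active_adapter         # tuners_utils.py:516; guarded via TENSOR_ALIASING

The TENSOR_ALIASING guard asserts that this layer's _active_adapter is the very same object as transformer_blocks[0].norm1.linear's, i.e. PEFT shares one adapter-name container across all wrapped layers, so switching adapters anywhere invalidates the whole graph at once; the line appears twice, which looks like the same guard being recorded for two accesses.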
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[21].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[21].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.norm_k.weight, 140581772774032) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.norm_q, 140581770776432) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[21].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[21].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.norm_q.weight, 140581772782992) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
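The norm_k / norm_q guards (eps == 1e-06, an optional weight parameter) protect the RMSNorm forward whose lines the comments quote (diffusers normalization.py:428 and 430). A sketch built from those two quoted lines, with the variance computation and the weight multiply filled in as the standard RMSNorm steps (assumptions, not quoted in the trace):

    import torch

    def rms_norm(hidden_states, weight, eps=1e-06):
        # Mean of squares over the last dim (assumed; not quoted in the trace).
        variance = hidden_states.pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + eps)  # normalization.py:428
        if weight is not None:                                       # normalization.py:430
            hidden_states = hidden_states * weight                   # assumed
        return hidden_states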
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[21].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[21].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.processor, 140581770776336) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.forward, accessed_by=FuncDefaultsGuardAccessor
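The EQUALS_MATCH on attn.heads == 24 pins the reshape arithmetic quoted at attention_processor.py:1721, and the TYPE_MATCH/ID_MATCH pair on attn.processor freezes which processor __call__ the compiled graph dispatches to. Assuming the 3072-wide inner projection commonly used by Flux checkpoints (an assumption; inner_dim is not in the trace), the quoted line works out to:

    heads = 24                      # guarded by the EQUALS_MATCH above
    inner_dim = 3072                # assumed for this checkpoint
    head_dim = inner_dim // heads   # attention_processor.py:1721 -> 128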
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm, accessed_by=DictGetItemGuardAccessor(norm)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm, 140581770776000) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[21].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.norm, 140581770776144) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.silu, 140581770776048) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.linear, 140533212356928) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[21].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
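These norm.* guards walk diffusers' single-block adaptive layer norm; the comments quote its two working lines (normalization.py:169 and 171). A sketch with the chunk step in between filled in (an assumption; only lines 169 and 171 appear in the trace):

    def ada_layer_norm_zero_single(self, x, emb):
        emb = self.linear(self.silu(emb))                     # normalization.py:169
        shift_msa, scale_msa, gate_msa = emb.chunk(3, dim=1)  # assumed
        x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # normalization.py:171
        return x, gate_msa

Note that norm.linear is itself LoRA-wrapped (ID 140533212356928 with lora_A/lora_B children below), so the adapter also modulates the conditioning path, not just the attention projections.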
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.linear.lora_A, 140533214181504) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.linear.lora_A['default_0'], 140533213820672) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.linear.lora_A['default_0'].weight, 140537321095952) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.linear.lora_B, 140533214173584) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.linear.lora_B['default_0'], 140533213815152) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.linear.base_layer, 140581770776096) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.linear.lora_dropout, 140533212359520) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.linear.lora_dropout['default_0'], 140533212359184) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[21].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[21].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[21].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[21].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[21].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[21].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[21].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[21].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[21].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].act_mlp, 140581770776240) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_mlp, 140533213813328) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[21].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_mlp.lora_A, 140533213672880) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_mlp.lora_A['default_0'], 140533213305184) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_mlp.lora_A['default_0'].weight, 140537320851392) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_mlp.lora_B, 140533213662224) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_mlp.lora_B['default_0'], 140533213310560) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_mlp.base_layer, 140581770776192) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_mlp.lora_dropout, 140533213667840) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_mlp.lora_dropout['default_0'], 140533213674176) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[21].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[21].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[21].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[21].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[21].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[21].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[21].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[21].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[21].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
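The proj_mlp subtree above pins down every piece of Python-level state that PEFT's LoRA Linear.forward reads, at exactly the source lines quoted in the trailing comments (peft/tuners/lora/layer.py:557-568): the base layer and its training flag, the lora_dropout ModuleDict and its 'default_0' entry, the scaling dict (guarded by value, EQUALS_MATCH against 1.0), and the use_dora flags. A minimal sketch of that guarded code path, paraphrased from the source lines the guards quote; the final update line is the standard LoRA formula and is an assumption, since it is not itself quoted in this log:

def lora_linear_forward(self, x, *args, **kwargs):
    # layer.py:557 -- ID_MATCH on base_layer and on its .training flag
    result = self.base_layer(x, *args, **kwargs)
    for active_adapter in self.active_adapters:
        if active_adapter not in self.lora_A.keys():   # layer.py:560 -- ID_MATCH on the lora_A ModuleDict
            continue
        lora_A = self.lora_A[active_adapter]           # layer.py:562
        lora_B = self.lora_B[active_adapter]           # layer.py:563
        dropout = self.lora_dropout[active_adapter]    # layer.py:564 -- ID_MATCH on the Dropout module
        scaling = self.scaling[active_adapter]         # layer.py:565 -- EQUALS_MATCH: value must stay 1.0
        x = x.to(lora_A.weight.dtype)                  # layer.py:566 -- ID_MATCH on lora_A's weight object
        if not self.use_dora[active_adapter]:          # layer.py:568 -- ID_MATCH on the stored False
            result = result + lora_B(lora_A(dropout(x))) * scaling  # standard LoRA update (assumed)
    return result

Because scaling['default_0'] is guarded by value rather than by object identity, anything that rescales the adapter in place, e.g. invoking the pipeline with a different lora_scale so that scale_lora_layers rewrites these dicts, should be expected to fail the EQUALS_MATCH and trigger yet another recompilation.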
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_out, 140533213309264) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[21].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_out.lora_A, 140533213308592) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_out.lora_A['default_0'], 140533213189584) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_out.lora_A['default_0'].weight, 140537320862112) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_out.lora_B, 140533213308688) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_out.lora_B['default_0'], 140533213189776) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_out.base_layer, 140581770776288) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_out.lora_dropout, 140533213307008) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_out.lora_dropout['default_0'], 140533213309600) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[21].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[21].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[21].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[21].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[21].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[21].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[21].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[21].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[21].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
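Note the pair of TENSOR_ALIASING entries closing each LoRA subtree: despite the name, the printed check is plain object identity (`a is b`) between this layer's _active_adapter and the one on transformer_blocks[0].norm1.linear, i.e. every wrapped layer shares a single adapter-name list, and the line is emitted once per aliased source, hence the duplicate. The [0/1] tag on these records marks this dump as the first recompilation of frame 0 (the [0/0] tree was printed earlier in this log). A dump like this can be captured with PyTorch's structured logging; a minimal sketch, where the logging calls are real torch APIs and the compile target is an assumed stand-in:

import torch

# Equivalent to running under TORCH_LOGS="guards,recompiles"
torch._logging.set_logs(guards=True, recompiles=True)

# transformer = ...  # assumed: the Flux transformer with a PEFT LoRA loaded
# compiled = torch.compile(transformer)
# compiled(**inputs)   # first call compiles and prints the [0/0] guard tree
# # a later call that violates a guard logs the failure reason and a [0/1] tree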
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22], accessed_by=GetItemGuardAccessor(22)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22], 140581770775952) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[22].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn, 140581770777152) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[22].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_k, 140533127354960) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[22].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_k.lora_A, 140533127362832) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_k.lora_A['default_0'], 140533127539168) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_k.lora_A['default_0'].weight, 140537320743424) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_k.lora_B, 140533127361200) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_k.lora_B['default_0'], 140533127540656) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_k.base_layer, 140581770777296) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_k.lora_dropout, 140533127363552) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_k.lora_dropout['default_0'], 140533127362784) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[22].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[22].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[22].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[22].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[22].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[22].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[22].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[22].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[22].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
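Every wrapped Linear contributes the same few dozen guards seen for to_k above (ID_MATCH on the module, on lora_A/lora_B/lora_dropout, their 'default_0' entries and training flags, TYPE_MATCH/DICT_LENGTH/EQUALS_MATCH on scaling and use_dora, checks on merged_adapters, _disable_adapters, the hook dicts, and the _active_adapter aliasing), so the full tree scales linearly with the number of LoRA-wrapped layers in the model. Each guard-violating call also consumes one entry of Dynamo's per-frame compile cache; a sketch of the relevant knob, where the default value shown is an assumption about the torch build that produced this log:

import torch._dynamo

# Each recompile ([0/1], [0/2], ...) occupies one cache slot for frame 0.
# When the limit is exceeded, Dynamo stops recompiling and falls back to
# eager execution for that frame.
torch._dynamo.config.cache_size_limit = 8  # default in recent releases; raise it if inputs legitimately vary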
___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_q.lora_A, 140533127357552) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_q.lora_A['default_0'], 140533127361296) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_q.lora_A['default_0'].weight, 140537320739184) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_q.lora_B, 140533127357408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_q.lora_B['default_0'], 140533127361248) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.base_layer, 
accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_q.base_layer, 140581770777392) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_q.lora_dropout, 140533127357168) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_q.lora_dropout['default_0'], 140533127357216) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[22].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[22].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[22].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[22].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[22].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[22].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[22].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[22].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[22].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
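The ID_MATCH and DICT_CONTAINS entries above boil down to cheap identity and dict-membership checks at guard-evaluation time. A minimal illustrative sketch of their semantics (the real ___check_obj_id / ___dict_contains helpers are part of torch._dynamo's guard runtime; these Python equivalents only show what is being asserted):

    # Illustrative only: what the guard predicates above evaluate.
    def check_obj_id(obj, expected_id: int) -> bool:
        # ID_MATCH: the guarded attribute must still be the exact same
        # Python object (same CPython id) it was at compile time,
        # e.g. to_q.base_layer or the cached True/False singletons.
        return id(obj) == expected_id

    def dict_contains(key: str, d: dict) -> bool:
        # DICT_CONTAINS: used negated above to assert that no
        # instance-level 'forward' override has been patched onto
        # the module since compilation (nn/modules/module.py:1556).
        return key in d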
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_v, 140533127530432) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[22].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_v.lora_A, 140533127530864) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_v.lora_A['default_0'], 140533127529760) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_v.lora_A['default_0'].weight, 140537320736464) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_v.lora_B, 140533127531920) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_v.lora_B['default_0'], 140533127528944) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_v.base_layer, 140581770777440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_v.lora_dropout, 140533127540272) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_v.lora_dropout['default_0'], 140533127544064) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[22].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[22].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[22].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[22].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[22].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[22].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[22].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[22].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[22].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
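Nearly every guard above cites the same few lines of peft's LoRA Linear forward. An abridged paraphrase of that path, stitched together from exactly the source lines quoted in the guard comments (peft/tuners/lora/layer.py:557-568; see the actual peft source for the full, unabridged version):

    # Abridged paraphrase of peft's lora.Linear.forward, assembled from the
    # lines quoted in the guard comments above. Not the verbatim peft code.
    def lora_linear_forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)      # layer.py:557
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():  # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]          # layer.py:562
            lora_B = self.lora_B[active_adapter]          # layer.py:563
            dropout = self.lora_dropout[active_adapter]   # layer.py:564
            scaling = self.scaling[active_adapter]        # layer.py:565
            x = x.to(lora_A.weight.dtype)                 # layer.py:566
            if not self.use_dora[active_adapter]:         # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Every attribute this function touches (lora_A, lora_B, lora_dropout, scaling, use_dora, merged_adapters, _active_adapter) shows up above as its own guard, which is why each LoRA-wrapped Linear contributes such a large subtree.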
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.norm_k, 140581770777344) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[22].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[22].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.norm_k.weight, 140581772783952) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.norm_q, 140581770777200) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[22].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[22].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.norm_q.weight, 140581773260208) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[22].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[22].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.processor, 140581770777104) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
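The norm_q/norm_k guards pin eps with an EQUALS_MATCH because it is a plain Python float read inside forward, so Dynamo burns its value into the graph. A rough sketch of the RMS-norm step those guard comments point at (paraphrased from diffusers/src/diffusers/models/normalization.py:428-430; the variance computation is an assumption here, only lines 428 and 430 are quoted in the log):

    import torch

    # Sketch of the guarded RMS-norm step; illustrative, not the diffusers source.
    def rms_norm(hidden_states: torch.Tensor, weight, eps: float = 1e-6):
        variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + eps)  # :428
        if weight is not None:                                       # :430
            hidden_states = hidden_states * weight
        return hidden_states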
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.forward, accessed_by=FuncDefaultsGuardAccessor
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
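Note the FuncDefaultsGuardAccessor entry: the traced code branches on a default argument (attention_processor.py:1713), so the default itself must be guarded. An illustrative check of what that ID_MATCH asserts (presumably that the encoder_hidden_states default is still the None singleton, given the branch quoted in the comment; this helper is hypothetical):

    # Hypothetical equivalent of the __defaults__[0] guard above.
    def check_forward_default(attn) -> bool:
        # __defaults__[0] is the first defaulted parameter of attn.forward;
        # the compiled graph assumed the None branch at
        # attention_processor.py:1713, so the default must not change.
        return attn.forward.__defaults__[0] is None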
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm, 140581770776768) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[22].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.norm, 140581770776912) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.silu, 140581770776816) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.linear, 140533218309456) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
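This norm subtree corresponds to the adaptive-norm modulation in the single transformer block. A sketch of the guarded path, paraphrased from the two source lines quoted above (normalization.py:169 and :171); the three-way chunk into shift/scale/gate is an assumption for illustration, not quoted in the log:

    import torch

    # Illustrative modulation path; only lines :169 and :171 are verbatim
    # from the guard comments, the chunk(3) split is assumed.
    def ada_norm_single(x, emb, silu, linear, norm):
        emb = linear(silu(emb))                                      # :169
        shift_msa, scale_msa, gate_msa = emb.chunk(3, dim=1)
        x = norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # :171
        return x, gate_msa

Note that self.norm.linear is itself a LoRA-wrapped Linear, so the same peft guard pattern seen for to_q/to_v repeats below it.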
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[22].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.linear.lora_A, 140533218296304) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.linear.lora_A['default_0'], 140533217752016) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.linear.lora_A['default_0'].weight, 140537320747744) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.linear.lora_B, 140533218299760) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.linear.lora_B['default_0'], 140533217741312) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.linear.base_layer, 140581770776864) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.linear.lora_dropout, 140533218305520) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.linear.lora_dropout['default_0'], 140533218309696) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[22].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[22].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[22].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[22].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[22].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[22].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[22].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
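Because scaling['default_0'] is a plain Python float, its value is specialized into the compiled graph and pinned by the EQUALS_MATCH guard above. A self-contained toy (hypothetical module, not peft) showing the consequence: changing the scale fails the guard and forces a recompile:

    import torch

    # Toy reproduction of the EQUALS_MATCH-on-float behavior seen above.
    class Scaled(torch.nn.Module):
        def __init__(self):
            super().__init__()
            self.scaling = {"default_0": 1.0}  # plain float -> EQUALS_MATCH guard

        def forward(self, x):
            return x * self.scaling["default_0"]

    base = Scaled()
    m = torch.compile(base)
    m(torch.ones(4))                  # first compile; guards scaling == 1.0
    base.scaling["default_0"] = 0.5   # EQUALS_MATCH now fails on the next call
    m(torch.ones(4))                  # guard miss -> Dynamo recompiles the graph

This is one reason calling set_adapters / changing the LoRA scale on a compiled FLUX transformer tends to trigger full recompilation.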
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[22].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[22].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].act_mlp, 140581770777008) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
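The TENSOR_ALIASING entries assert object identity, not equality: every LoRA layer's _active_adapter must still be the very same list shared with transformer_blocks[0].norm1.linear. An illustrative check of what is asserted (model is a hypothetical handle to the compiled transformer):

    # Illustrative only: what the TENSOR_ALIASING guards above verify.
    def check_active_adapter_aliasing(model) -> bool:
        shared = model.transformer_blocks[0].norm1.linear._active_adapter
        local = model.single_transformer_blocks[22].norm.linear._active_adapter
        return shared is local  # same list object, not merely equal contents

Replacing the list on any one layer (rather than mutating the shared one in place) would break the aliasing guard and force a recompile.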
___check_obj_id(L['self'].single_transformer_blocks[22].proj_mlp, 140533217745296) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[22].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_mlp.lora_A, 140533130146912) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_mlp.lora_A['default_0'], 140533130134960) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_mlp.lora_A['default_0'].weight, 140537320741904) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_mlp.lora_B, 140533130135920) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_mlp.lora_B['default_0'], 140533130134480) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_mlp.base_layer, 140581770776960) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_mlp.lora_dropout, 140533217749280) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 
in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_mlp.lora_dropout['default_0'], 140533217748944) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[22].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[22].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[22].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[22].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[22].proj_mlp.use_dora) == 1 # if not 
self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[22].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[22].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[22].proj_mlp._active_adapter # return 
self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[22].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_out, 140533131364048) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[22].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_out.lora_A, 140533131364096) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[22].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_out.lora_A['default_0'], 140533127354672) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_out.lora_A['default_0'].weight, 140537320739744) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_out.lora_B, 140533131364912) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | 
| +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_out.lora_B['default_0'], 140533127355344) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_out.base_layer, 140581770777056) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_out.lora_dropout, 140533131363952) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_out.lora_dropout['default_0'], 140533131364288) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[22].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[22].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[22].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in 
forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[22].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[22].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[22].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[22].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[22].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[22].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23], accessed_by=GetItemGuardAccessor(23) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23], 140581770776720) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[23].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[23].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn, 140581770777920) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[23].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_k, 140533218968672) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[23].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_k.lora_A, 140533218817552) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_k.lora_A['default_0'], 140533219614944) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_k.lora_A['default_0'].weight, 140537320562960) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_k.lora_B, 140533218605952) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_k.lora_B['default_0'], 140533219620848) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = 
self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_k.base_layer, 140581770778064) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_k.lora_dropout, 140533218811408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_k.lora_dropout['default_0'], 140533218971408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | 
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[23].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[23].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[23].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[23].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[23].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # 
peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[23].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[23].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[23].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[23].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- 
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_q, 140533212579344) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[23].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_q.lora_A, 140533219093536) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_q.lora_A['default_0'], 140533218974480) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_q.lora_A['default_0'].weight, 140537320568400) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_q.lora_B, 140533219084512) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_q.lora_B['default_0'], 140533218971312) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_q.base_layer, 140581770778160) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_q.lora_dropout, 140533219083744) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_q.lora_dropout['default_0'], 140533219094640) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[23].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[23].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[23].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[23].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[23].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[23].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[23].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[23].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[23].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
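Nearly every `training`, `use_dora['default_0']`, and `_disable_adapters` check above is an `ID_MATCH` against one of two recurring ids (140591004393408 and 140591004393440), which are almost certainly the interned `True` and `False` singletons: `___check_obj_id` compares CPython object identity, not value. A sketch of the check (the helper name mirrors the log; the one-line implementation here is an assumption about its semantics):

def check_obj_id(obj, expected_id: int) -> bool:
    # Identity, not equality: passes only while the guarded source still
    # refers to the exact same Python object.
    return id(obj) == expected_id

false_id = id(False)
assert check_obj_id(False, false_id)      # module.training is False -> guard passes
assert not check_obj_id(True, false_id)   # flipping to train mode -> guard fails

This is why calling `.train()` or `.eval()` on a compiled model, or swapping any submodule for a new object, triggers a recompile even when the replacement is functionally identical.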
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_v, 140533219617872) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[23].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_v.lora_A, 140533219621808) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_v.lora_A['default_0'], 140533219619456) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_v.lora_A['default_0'].weight, 140537322518256) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_v.lora_B, 140533219621232) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_v.lora_B['default_0'], 140533219617632) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_v.base_layer, 140581770778208) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_v.lora_dropout, 140533219619264) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_v.lora_dropout['default_0'], 140533219617824) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[23].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[23].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[23].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[23].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[23].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[23].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[23].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[23].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[23].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
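The pair of `TENSOR_ALIASING` guards closing each LoRA layer asserts that `_active_adapter` on this layer is the very same object as on `transformer_blocks[0].norm1.linear`: PEFT shares one adapter list across layers, and the guard pins that sharing. Despite the name, the check appears to be plain `is` identity between the two sources, roughly (a sketch, not Dynamo's actual implementation):

def tensor_aliasing(a, b) -> bool:
    # Passes only while both guard sources still reference one shared object.
    return a is b

shared = ["default_0"]            # one list shared by every LoRA layer
layer_a_active, layer_b_active = shared, shared
assert tensor_aliasing(layer_a_active, layer_b_active)
layer_b_active = list(shared)     # equal value, different object -> guard fails
assert not tensor_aliasing(layer_a_active, layer_b_active)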
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.norm_k, 140581770778112) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[23].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[23].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.norm_k.weight, 140581765132864) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.norm_q, 140581770777968) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[23].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[23].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.norm_q.weight, 140581772776352) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
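The `EQUALS_MATCH: ... eps == 1e-06` entries show that even RMSNorm's float attributes are burned into the graph. To reproduce a dump like this one for your own model, enable the guards logging artifact before compiling (equivalent to running with the environment variable `TORCH_LOGS="guards"`):

import torch

# Print the guard tree (the TREE_GUARD_MANAGER dump seen above) after each
# compilation; same effect as TORCH_LOGS="guards".
torch._logging.set_logs(guards=True)

model = torch.nn.Linear(8, 8)
compiled = torch.compile(model)
compiled(torch.randn(1, 8))   # the guard dump follows this first call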
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[23].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[23].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.processor, 140581770777872) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.forward, accessed_by=FuncDefaultsGuardAccessor
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
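The closing `attn.forward` entries guard the function's `__defaults__` tuple: `forward.__defaults__[0]` is the default for `encoder_hidden_states`, and the guarded id (140591004478624) is presumably the `None` singleton, since the traced branch took `hidden_states.shape`. Rebinding the defaults would invalidate the graph. A sketch of what `FuncDefaultsGuardAccessor` inspects (toy function, not the diffusers one):

def forward(hidden_states, encoder_hidden_states=None):
    # Mirrors the guarded branch: which shape is read depends on the default.
    return hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape

# FuncDefaultsGuardAccessor reads this tuple; the ID_MATCH pins its first slot.
assert forward.__defaults__ == (None,)
assert forward.__defaults__[0] is None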
accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm, 140581770777536) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[23].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.norm, 140581770777680) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[23].norm.silu, 140581770777584) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.linear, 140533127532496) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[23].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.linear.lora_A, 140533128540704) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.linear.lora_A['default_0'], 140533217962176) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.linear.lora_A['default_0'].weight, 140537320554800) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.linear.lora_B, 140533128542768) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.linear.lora_B['default_0'], 140533217964576) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.linear.base_layer, 140581770777632) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
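The guards above pin down every attribute that peft's LoRA linear wrapper reads on the way through its forward (the peft/tuners/lora/layer.py:557-566 references in the trailing comments). As a rough illustration of why each one exists, here is a minimal sketch of that control flow; this is a simplified reconstruction with hypothetical names, not peft's actual code:

    import torch
    import torch.nn as nn

    class SketchLoraLinear(nn.Module):
        """Simplified stand-in for peft's lora.Linear, to show what gets guarded."""
        def __init__(self, base_layer: nn.Linear, r: int = 8):
            super().__init__()
            self.base_layer = base_layer   # guarded: ID_MATCH on base_layer
            self.lora_A = nn.ModuleDict({"default_0": nn.Linear(base_layer.in_features, r, bias=False)})
            self.lora_B = nn.ModuleDict({"default_0": nn.Linear(r, base_layer.out_features, bias=False)})
            self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})
            self.scaling = {"default_0": 1.0}        # guarded: TYPE_MATCH/DICT_LENGTH/EQUALS_MATCH
            self.use_dora = {"default_0": False}     # guarded: ID_MATCH against a bool
            self._active_adapter = ["default_0"]

        def forward(self, x):
            result = self.base_layer(x)
            for active_adapter in self._active_adapter:
                if active_adapter not in self.lora_A.keys():   # dict membership -> guard
                    continue
                lora_A = self.lora_A[active_adapter]           # -> ID_MATCH on lora_A['default_0']
                lora_B = self.lora_B[active_adapter]
                dropout = self.lora_dropout[active_adapter]
                scaling = self.scaling[active_adapter]         # float baked into the graph
                x = x.to(lora_A.weight.dtype)                  # -> guard on the weight object
                if not self.use_dora[active_adapter]:          # bool branch -> ID_MATCH
                    result = result + lora_B(lora_A(dropout(x))) * scaling
            return result

Every attribute read and dict lookup on that path is a specialization point, which is why each LoRA-wrapped linear contributes a whole subtree of guards like the one above.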
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.linear.lora_dropout, 140533127534320) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.linear.lora_dropout['default_0'], 140533127543344) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[23].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[23].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[23].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[23].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[23].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[23].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[23].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[23].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[23].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].act_mlp, 140581770777776) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
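The paired TENSOR_ALIASING entries just above assert Python object identity, not tensor values: in this trace, every LoRA layer's _active_adapter resolves to the very same object as L['self'].transformer_blocks[0].norm1.linear._active_adapter, so Dynamo records one representative and an "is" check per layer. A hypothetical, standalone illustration of the property being guarded:

    # Sketch (hypothetical classes): when one object is shared across modules,
    # an identity ("is") guard on each access site is enough; a mutation is
    # visible through every alias at once.
    class Layer:
        pass

    shared = ["default_0"]            # one active-adapter list shared by all layers
    a, b = Layer(), Layer()
    a._active_adapter = shared
    b._active_adapter = shared

    assert a._active_adapter is b._active_adapter   # what TENSOR_ALIASING asserts
    shared.append("default_1")                      # seen through both aliases
    assert b._active_adapter == ["default_0", "default_1"]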
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_mlp, 140533217951952) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[23].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_mlp.lora_A, 140533219430256) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_mlp.lora_A['default_0'], 140533219260640) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_mlp.lora_A['default_0'].weight, 140537320561040) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_mlp.lora_B, 140533219429248) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_mlp.lora_B['default_0'], 140533219256800) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_mlp.base_layer, 140581770777728) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
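The recurring DICT_CONTAINS: not ___dict_contains('forward', ...) guards all point at nn/modules/module.py:1556, where _call_impl resolves self.forward dynamically; the guard asserts that no instance-level attribute shadows the class's forward method. A minimal demonstration of the exact condition being guarded:

    # Why Dynamo checks `'forward' not in module.__dict__`: an instance-level
    # monkey-patch would change which function the compiled graph should call.
    import torch
    import torch.nn as nn

    m = nn.Linear(4, 4)
    assert "forward" not in m.__dict__          # usual case: forward lives on the class

    m.forward = lambda x: torch.zeros_like(x)   # instance-level override
    assert "forward" in m.__dict__              # this would invalidate the guard above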
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_mlp.lora_dropout, 140533217955504) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_mlp.lora_dropout['default_0'], 140533217953920) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[23].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[23].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[23].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[23].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[23].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[23].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[23].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[23].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[23].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_out, 140533217620656) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[23].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
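The EQUALS_MATCH guards on scaling['default_0'] == 1.0 show that the LoRA scale is read as a plain Python float, so it is baked into the compiled graph as a constant and re-checked by value on every call. The practical consequence: changing the scale later invalidates the guard and forces a recompile. A hedged sketch with a hypothetical module (not the diffusers/peft classes themselves):

    # Python-number attributes read in forward() become graph constants,
    # protected by EQUALS_MATCH guards; mutating them triggers recompilation.
    import torch

    class Scaled(torch.nn.Module):
        def __init__(self):
            super().__init__()
            self.scaling = {"default_0": 1.0}   # plain float, like LoraLayer.scaling

        def forward(self, x):
            return x * self.scaling["default_0"]  # guarded: == 1.0

    m = Scaled()
    compiled = torch.compile(m)
    compiled(torch.ones(2))       # first call compiles; guard records == 1.0
    m.scaling["default_0"] = 0.5  # guard now fails ...
    compiled(torch.ones(2))       # ... so this call recompiles the frame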
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_out.lora_A, 140533217613840) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_out.lora_A['default_0'], 140533217500688) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_out.lora_A['default_0'].weight, 140537320559360) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_out.lora_B, 140533217614080) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_out.lora_B['default_0'], 140533217500640) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_out.base_layer, 140581770777824) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_out.lora_dropout, 140533217623632) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_out.lora_dropout['default_0'], 140533217623392) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[23].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[23].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[23].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[23].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[23].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[23].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[23].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[23].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[23].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
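That closes the guard subtree for single_transformer_blocks[23]: every LoRA-wrapped projection (norm.linear, proj_mlp, proj_out) contributes the same ~30 guards, and the pattern repeats per block. If the adapter weights are static at inference time, one way to shrink this tree is to merge the LoRA deltas into the base weights before compiling, so the compiled model sees plain nn.Linear modules instead of the branchy wrappers; peft exposes this as merge_and_unload() and diffusers pipelines as fuse_lora() (check the versions you use; this is a suggestion, not the method the log's author necessarily took). The merge itself is simple algebra, W' = W + scaling * (B @ A):

    import torch

    # After merging, none of the lora_A/lora_B/scaling/use_dora attributes are
    # read at runtime, so their guards disappear.
    W = torch.randn(8, 8)   # base_layer.weight
    A = torch.randn(4, 8)   # lora_A.weight, rank r = 4
    B = torch.randn(8, 4)   # lora_B.weight
    scaling = 1.0           # matches the EQUALS_MATCH == 1.0 guards above

    W_merged = W + scaling * (B @ A)

    x = torch.randn(2, 8)
    lora_out = x @ W.T + scaling * (x @ A.T) @ B.T   # wrapper-style forward
    merged_out = x @ W_merged.T                      # merged forward
    assert torch.allclose(lora_out, merged_out, atol=1e-5)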
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24], accessed_by=GetItemGuardAccessor(24)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24], 140581770777488) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[24].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn, 140581770778688) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[24].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_k, 140533224363184) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[24].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_k.lora_A, 140533224365584) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
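Guard dumps like this one come from PyTorch's structured logging for torch.compile. Artifact names have shifted slightly across 2.x releases, so treat the snippet below as a sketch rather than verified setup for the exact version in this log:

    # Enable guard (and recompile-reason) logging, either via the environment:
    #   TORCH_LOGS="guards,recompiles" python my_script.py
    # or programmatically:
    import torch

    torch._logging.set_logs(guards=True, recompiles=True)

    compiled = torch.compile(torch.nn.Linear(4, 4))
    compiled(torch.randn(2, 4))   # the guard tree prints when this frame compiles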
self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_k.lora_A['default_0'], 140533224367840) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_k.lora_A['default_0'].weight, 140537322515056) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_k.lora_B, 140533224366016) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_k.lora_B['default_0'], 140533224367168) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_k.base_layer, 140581770778832) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_k.lora_dropout, 140533224362800) # dropout = self.lora_dropout[active_adapter] # 
peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_k.lora_dropout['default_0'], 140533224363904) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[24].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[24].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | 
+- EQUALS_MATCH: L['self'].single_transformer_blocks[24].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[24].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[24].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[24].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[24].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 
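The to_k guards above trace, attribute read by attribute read, the branch PEFT executes in its LoRA Linear forward (the source lines quoted in the guard comments, peft/tuners/lora/layer.py:557-568). A minimal sketch of that branch, reconstructed only from those quoted lines; the class name, constructor, and the final update expression are illustrative assumptions, not the verbatim PEFT implementation:

    # Sketch of the guarded LoRA forward path (assumed shape; only the
    # commented statements are quoted verbatim in the guard dump above).
    import torch
    import torch.nn as nn

    class LoraLinearSketch(nn.Module):
        def __init__(self, base_layer: nn.Linear, r: int = 16):
            super().__init__()
            self.base_layer = base_layer                      # guarded via ID_MATCH
            self.lora_A = nn.ModuleDict(
                {"default_0": nn.Linear(base_layer.in_features, r, bias=False)})
            self.lora_B = nn.ModuleDict(
                {"default_0": nn.Linear(r, base_layer.out_features, bias=False)})
            self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})
            self.scaling = {"default_0": 1.0}                 # EQUALS_MATCH == 1.0
            self.use_dora = {"default_0": False}              # ID_MATCH on False
            self.active_adapters = ["default_0"]

        def forward(self, x, *args, **kwargs):
            result = self.base_layer(x, *args, **kwargs)      # layer.py:557
            for active_adapter in self.active_adapters:
                if active_adapter not in self.lora_A.keys():  # layer.py:560
                    continue
                lora_A = self.lora_A[active_adapter]          # layer.py:562
                lora_B = self.lora_B[active_adapter]          # layer.py:563
                dropout = self.lora_dropout[active_adapter]   # layer.py:564
                scaling = self.scaling[active_adapter]        # layer.py:565
                x = x.to(lora_A.weight.dtype)                 # layer.py:566
                if not self.use_dora[active_adapter]:         # layer.py:568
                    # standard LoRA update in the non-DoRA branch (assumed)
                    result = result + lora_B(lora_A(dropout(x))) * scaling
            return result

Every attribute read in this branch (lora_A, lora_B, lora_dropout, scaling, use_dora, merged_adapters, ...) becomes its own guard, which is why a single LoRA-wrapped projection contributes a few dozen guards and the full model produces a dump of this size.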
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_q, 140533224362560) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[24].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_q.lora_A, 140533224365248) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_q.lora_A['default_0'], 140533224361360) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_q.lora_A['default_0'].weight, 140537322504656) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_q.lora_B, 140533224360592) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_q.lora_B['default_0'], 140533224361312) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_q.base_layer, 140581770778928) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_q.lora_dropout, 140533224363472) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_q.lora_dropout['default_0'], 140533224368608) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[24].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[24].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[24].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[24].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[24].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
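The dump mixes several guard kinds: ID_MATCH and TENSOR_ALIASING check object identity, TYPE_MATCH checks the identity of the value's type, EQUALS_MATCH checks value equality (e.g. the LoRA scaling == 1.0), and DICT_LENGTH/LENGTH_CHECK check container sizes. Rough Python equivalents, for orientation only; the real checks are compiled accessors in torch/_dynamo/guards.py:

    # Illustrative semantics of the guard kinds above (not Dynamo's actual code).
    def id_match(obj, expected_id):        # ID_MATCH / ___check_obj_id
        return id(obj) == expected_id      # same object, not just an equal value

    def type_match(obj, expected_type_id): # TYPE_MATCH / ___check_type_id
        return id(type(obj)) == expected_type_id

    def equals_match(value, expected):     # EQUALS_MATCH, e.g. scaling == 1.0
        return value == expected

    def dict_length(d, n):                 # DICT_LENGTH, e.g. len(scaling) == 1
        return len(d) == n

    def same_object(a, b):                 # TENSOR_ALIASING: both sources must
        return a is b                      # resolve to the very same object

The practical consequence: mutating any of these at runtime, for example moving the adapter scale away from 1.0 or loading a second adapter so the per-layer dicts grow past length 1, fails the corresponding guard and forces a recompile.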
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[24].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[24].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[24].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[24].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_v, 140533224368320) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[24].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_v.lora_A, 140533225364768) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_v.lora_A['default_0'], 140533225371296) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_v.lora_A['default_0'].weight, 140537322509936) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_v.lora_B, 140533225368224) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_v.lora_B['default_0'], 140533225371872) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_v.base_layer, 140581770778976) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_v.lora_dropout, 140533224365872) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_v.lora_dropout['default_0'], 140533224368752) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[24].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[24].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[24].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[24].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[24].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[24].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[24].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[24].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[24].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
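The paired TENSOR_ALIASING guards at the end of each projection assert that _active_adapter on every wrapped layer is literally the same object as L['self'].transformer_blocks[0].norm1.linear._active_adapter: shared state, not equal copies. A toy illustration of what the check requires (the class and variable names here are illustrative, not PEFT's):

    # Every layer must reference one shared _active_adapter object.
    shared_active_adapter = ["default_0"]

    class LayerSketch:
        def __init__(self):
            self._active_adapter = shared_active_adapter  # shared, not copied

    a, b = LayerSketch(), LayerSketch()
    assert a._active_adapter is b._active_adapter  # what the guard checks

Replacing any layer's _active_adapter with an equal but distinct object would pass an equality test yet still fail this guard.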
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.norm_k, 140581770778880) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[24].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[24].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.norm_k.weight, 140581771019104) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.norm_q, 140581770778736) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[24].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[24].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.norm_q.weight, 140581773261248) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
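The norm_q / norm_k guards pin the query/key RMSNorm layers down to eps == 1e-06 and the presence of a learned weight. A sketch of the normalization those guard comments quote (diffusers/src/diffusers/models/normalization.py:428-430); the variance computation and the final weight multiply are standard RMSNorm assumptions, while the rsqrt line and the `if self.weight is not None` test appear verbatim in the log:

    # Sketch of the guarded q/k RMSNorm (simplified; not the verbatim class).
    import torch
    import torch.nn as nn

    class RMSNormSketch(nn.Module):
        def __init__(self, dim: int, eps: float = 1e-6, elementwise_affine: bool = True):
            super().__init__()
            self.eps = eps                                    # EQUALS_MATCH == 1e-06
            self.weight = nn.Parameter(torch.ones(dim)) if elementwise_affine else None

        def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
            # mean of squares over the feature dim (assumed, standard RMSNorm)
            variance = hidden_states.pow(2).mean(-1, keepdim=True)
            hidden_states = hidden_states * torch.rsqrt(variance + self.eps)  # normalization.py:428
            if self.weight is not None:                                       # normalization.py:430
                hidden_states = hidden_states * self.weight
            return hidden_states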
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.norm_q.weight, 140581773261248) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[24].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[24].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.processor, 140581770778640) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 
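
[Note] The guards above pin the query-projection RMSNorm hyperparameters (eps via EQUALS_MATCH == 1e-06, weight via ID_MATCH on one specific Parameter object) and specialize attn.heads to 24, so a checkpoint with a different head count or eps would force a recompile. A minimal from-memory sketch of the guarded branch in diffusers' RMSNorm.forward (normalization.py:428-430); this is a paraphrase, not the verbatim source:

    import torch

    def rms_norm(hidden_states: torch.Tensor, weight, eps: float = 1e-06) -> torch.Tensor:
        # normalization.py:428: the EQUALS_MATCH above pins eps to 1e-06
        variance = hidden_states.pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + eps)
        if weight is not None:  # normalization.py:430: ID_MATCH pins this Parameter
            hidden_states = hidden_states * weight
        return hidden_states
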
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.forward, accessed_by=FuncDefaultsGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm, accessed_by=DictGetItemGuardAccessor(norm)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm, 140581770778304) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[24].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.norm, 140581770778448) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.silu, 140581770778352) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.linear, 140533219616192) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[24].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
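
[Note] This subtree walks diffusers' AdaLayerNormZeroSingle (the block's norm): silu -> linear (LoRA-wrapped here, hence the lora_A/lora_B guards that follow) -> chunk into shift/scale/gate, then modulate the LayerNorm output. A from-memory sketch of the cited lines (normalization.py:169-171); the class name and dims below are illustrative, not the verbatim source:

    import torch
    import torch.nn as nn

    class AdaLayerNormZeroSingleSketch(nn.Module):
        def __init__(self, dim: int):
            super().__init__()
            self.silu = nn.SiLU()
            self.linear = nn.Linear(dim, 3 * dim)
            self.norm = nn.LayerNorm(dim, elementwise_affine=False, eps=1e-6)

        def forward(self, x: torch.Tensor, emb: torch.Tensor):
            emb = self.linear(self.silu(emb))                                 # normalization.py:169
            shift_msa, scale_msa, gate_msa = emb.chunk(3, dim=1)
            x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # normalization.py:171
            return x, gate_msa
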
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.linear.lora_A, 140533219612880) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.linear.lora_A['default_0'], 140533219613168) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.linear.lora_A['default_0'].weight, 140537322505616) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.linear.lora_B, 140533219612448) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.linear.lora_B['default_0'], 140533219613216) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
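
[Note] The lora_A/lora_B/base_layer/lora_dropout/scaling/use_dora guards in this subtree all come from one function: peft's lora.Linear.forward. Every attribute and dict item it reads becomes a guard node. A from-memory sketch of the path the cited line numbers (layer.py:557-568) trace through; a paraphrase, not the verbatim peft source:

    import torch

    def lora_linear_forward(self, x: torch.Tensor) -> torch.Tensor:
        result = self.base_layer(x)                       # layer.py:557
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():  # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]          # layer.py:562
            lora_B = self.lora_B[active_adapter]          # layer.py:563
            dropout = self.lora_dropout[active_adapter]   # layer.py:564
            scaling = self.scaling[active_adapter]        # layer.py:565
            x = x.to(lora_A.weight.dtype)                 # layer.py:566
            if not self.use_dora[active_adapter]:         # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result
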
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.linear.base_layer, 140581770778400) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.linear.lora_dropout, 140533219620656) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.linear.lora_dropout['default_0'], 140533219620128) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[24].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[24].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[24].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[24].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[24].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
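
[Note] Rough Python equivalents of the predicates above, to make the guard kinds concrete; `model` is a hypothetical handle to the compiled transformer, and the real checks run inside the compiled guard tree rather than as Python asserts:

    lin = model.single_transformer_blocks[24].norm.linear
    assert type(lin.scaling) is dict             # TYPE_MATCH on `scaling`
    assert len(lin.scaling) == 1                 # DICT_LENGTH
    assert lin.scaling['default_0'] == 1.0       # EQUALS_MATCH: lora_alpha/r baked in as a constant
    assert lin.use_dora['default_0'] is False    # ID_MATCH works on bools because
                                                 # True/False are interpreter singletons

Changing any of these at runtime (e.g. set_adapters with a different LoRA scale) would invalidate the guards and trigger a recompile.
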
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[24].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[24].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[24].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[24].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].act_mlp, 140581770778544) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_mlp, 140533219611344) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
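
[Note] The norm/act_mlp/proj_mlp/proj_out guards in this block all stem from FluxSingleTransformerBlock.forward; the comments in the guards cite transformer_flux.py:88, 89 and 98. A from-memory sketch of that forward (a paraphrase; the real method also concatenates the attention output and clamps fp16 values):

    import torch

    def single_block_forward(self, hidden_states, temb):
        residual = hidden_states
        norm_hidden_states, gate = self.norm(hidden_states, emb=temb)        # transformer_flux.py:88
        mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states))  # transformer_flux.py:89
        attn_output = self.attn(hidden_states=norm_hidden_states)
        hidden_states = torch.cat([attn_output, mlp_hidden_states], dim=2)
        gate = gate.unsqueeze(1)
        hidden_states = gate * self.proj_out(hidden_states)                  # transformer_flux.py:98
        return residual + hidden_states
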
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[24].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_mlp.lora_A, 140533219606976) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_mlp.lora_A['default_0'], 140533219608272) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_mlp.lora_A['default_0'].weight, 140537322513456) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_mlp.lora_B, 140533219607168) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_mlp.lora_B['default_0'], 140533219608080) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_mlp.base_layer, 140581770778496) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_mlp.lora_dropout, 140533219607936) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_mlp.lora_dropout['default_0'], 140533219612352) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[24].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[24].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[24].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[24].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[24].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[24].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[24].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[24].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[24].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
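
[Note] The TENSOR_ALIASING guards (applied here to the _active_adapter attribute, a Python list in peft, despite the guard's name) assert object identity: every LoRA module's _active_adapter must be the very same object as the one on transformer_blocks[0].norm1.linear, so the guard tree can compare identities against a single reference instead of re-validating each list's contents. Illustrative only; `model` is a hypothetical handle to the compiled transformer:

    ref = model.transformer_blocks[0].norm1.linear._active_adapter
    assert model.single_transformer_blocks[24].proj_mlp._active_adapter is ref  # identity, not equality
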
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_out, 140533219614656) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[24].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_out.lora_A, 140533224367552) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_out.lora_A['default_0'], 140533224369808) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_out.lora_A['default_0'].weight, 140537322512496) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_out.lora_B, 140533224374176) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_out.lora_B['default_0'], 140533224372208) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_out.base_layer, 140581770778592) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_out.lora_dropout, 140533219622480) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_out.lora_dropout['default_0'], 140533219616816) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[24].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[24].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[24].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[24].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: 
len(L['self'].single_transformer_blocks[24].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[24].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[24].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].single_transformer_blocks[24].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[24].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25], accessed_by=GetItemGuardAccessor(25) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25], 140581770778256) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[25].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn, 140581770779456) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[25].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_k, 140533226590304) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[25].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_k.lora_A, 140533226595344) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_k.lora_A['default_0'], 140533226586272) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_k.lora_A['default_0'].weight, 140537322388144) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_k.lora_B, 140533226590880) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_k.lora_B['default_0'], 140533226586176) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_k.base_layer, 140581770779600) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_k.lora_dropout, 140533226593712) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_k.lora_dropout['default_0'], 140533226594240) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[25].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[25].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[25].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[25].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[25].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[25].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[25].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[25].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[25].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_q, 140533226602400) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[25].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[25].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_q.lora_A, 140533226599088) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_q.lora_A['default_0'], 140533226592752) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_q.lora_A['default_0'].weight, 140537322379824) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_q.lora_B, 140533226598992) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_q.lora_B['default_0'], 140533226596544) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.lora_B['default_0'].__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_q.base_layer, 140581770779696) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_q.lora_dropout, 140533226601872) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | 
| +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_q.lora_dropout['default_0'], 140533226596832) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[25].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[25].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[25].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[25].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[25].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 
in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[25].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[25].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].single_transformer_blocks[25].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[25].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_v, 140533226588720) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[25].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_v.lora_A, 140533226588384) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_v.lora_A['default_0'], 140533226592176) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_v.lora_A['default_0'].weight, 140537322377264) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_v.lora_B, 140533226590784) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.lora_B.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_v.lora_B['default_0'], 140533226602352) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_v.base_layer, 140581770779744) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | 
+- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_v.lora_dropout, 140533226588960) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_v.lora_dropout['default_0'], 140533226587376) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[25].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[25].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.scaling['default_0'], 
accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[25].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[25].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[25].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[25].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[25].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[25].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[25].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.norm_k, 140581770779648) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[25].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_k.eps, 
accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[25].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.norm_k.weight, 140581773247968) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.norm_q, 140581770779504) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[25].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_q.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[25].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.norm_q.weight, 140581765132224) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[25].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[25].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.processor, 140581770779408) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm, 140581770779072) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[25].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.norm, 140581770779216) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.silu, 140581770779120) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.linear, 140533225367984) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[25].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.linear.lora_A, 140533225369328) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in 
self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.linear.lora_A['default_0'], 140533225365488) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.linear.lora_A['default_0'].weight, 140537322507936) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.linear.lora_B, 140533225362752) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.linear.lora_B['default_0'], 140533225361216) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.linear.base_layer, 140581770779168) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.linear.lora_dropout, 140533225368896) # dropout = 
self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.linear.lora_dropout['default_0'], 140533225369664) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[25].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[25].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[25].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[25].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[25].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[25].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[25].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.linear._disable_adapters, 140591004393440) # return 
self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[25].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[25].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].act_mlp, 140581770779312) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].act_mlp.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_mlp, 140533225364576) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[25].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_mlp.lora_A, 140533225366976) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[25].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_mlp.lora_A['default_0'], 140533225359104) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_mlp.lora_A['default_0'].weight, 140537322383024) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_mlp.lora_B, 140533225358480) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | 
| +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_mlp.lora_B['default_0'], 140533225358912) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_mlp.base_layer, 140581770779264) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_mlp.lora_dropout, 140533225364528) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_mlp.lora_dropout['default_0'], 140533225358528) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[25].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[25].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[25].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[25].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[25].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[25].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[25].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[25].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[25].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
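Every guard in the proj_mlp subtree above maps onto one attribute or dict access in peft's LoRA Linear.forward. Below is a minimal sketch of that code path, assembled from the source lines quoted in the guard comments (peft/tuners/lora/layer.py:557-568). The adapter loop and the final update line are not quoted in this log and are filled in with the standard LoRA formula, so treat this as a paraphrase rather than the verbatim peft implementation:

```python
# Paraphrase of peft's LoRA Linear.forward, reconstructed from the source
# lines quoted in the guard comments above. Lines marked "assumed" are not
# quoted in this log and use the standard LoRA update.
def lora_linear_forward(self, x, *args, **kwargs):
    result = self.base_layer(x, *args, **kwargs)      # layer.py:557
    for active_adapter in self.active_adapters:       # assumed adapter loop
        if active_adapter not in self.lora_A.keys():  # layer.py:560
            continue                                  # assumed
        lora_A = self.lora_A[active_adapter]          # layer.py:562
        lora_B = self.lora_B[active_adapter]          # layer.py:563
        dropout = self.lora_dropout[active_adapter]   # layer.py:564
        scaling = self.scaling[active_adapter]        # layer.py:565
        x = x.to(lora_A.weight.dtype)                 # layer.py:566
        if not self.use_dora[active_adapter]:         # layer.py:568
            # assumed: standard LoRA update result += B(A(dropout(x))) * s
            result = result + lora_B(lora_A(dropout(x))) * scaling
    return result
```

Each of the five dict lookups (lora_A, lora_B, lora_dropout, scaling, use_dora) is what produces the TYPE_MATCH / DICT_LENGTH / ID_MATCH chains above, and dynamo installs the same chain for every LoRA-wrapped projection it traces.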
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_out, 140533225359872) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[25].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_out.lora_A, 140533225363568) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_out.lora_A['default_0'], 140533226592320) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_out.lora_A['default_0'].weight, 140537322380224) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_out.lora_B, 140533225365920) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_out.lora_B['default_0'], 140533226589776) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_out.base_layer, 140581770779360) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_out.lora_dropout, 140533225362128) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_out.lora_dropout['default_0'], 140533225362560) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[25].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[25].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[25].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[25].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[25].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[25].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[25].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[25].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[25].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
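That closes the guard subtree for single_transformer_blocks[25]; the dump continues with an essentially identical subtree for block 26, and the pattern repeats for every block and every LoRA-wrapped projection, which is why this guard dump is so large. When the adapter is fixed at inference time, a common way to avoid installing and re-evaluating all of these per-adapter guards is to fold the LoRA weights into the base layers before compiling. A sketch of that approach with the diffusers LoRA-fusion API (the checkpoint path is a placeholder; this is one possible workaround, not something prescribed by this log):

```python
import torch
from diffusers import FluxPipeline

pipe = FluxPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16
).to("cuda")
pipe.load_lora_weights("path/to/lora")  # placeholder checkpoint

# Merge W + BA into the base weights and drop the peft wrapper modules, so
# compile traces plain nn.Linear layers and none of the lora_A / lora_B /
# scaling / use_dora guards above get installed.
pipe.fuse_lora()
pipe.unload_lora_weights()

pipe.transformer = torch.compile(pipe.transformer, fullgraph=True)
```

The trade-off is that the adapter can no longer be swapped or re-scaled without re-fusing and recompiling.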
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26], accessed_by=GetItemGuardAccessor(26)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26], 140581770779024) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[26].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn, 140581770780224) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[26].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_k, 140533227564160) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[26].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_k.lora_A, 140533227567712) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_k.lora_A['default_0'], 140533227565792) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_k.lora_A['default_0'].weight, 140537322191296) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_k.lora_B, 140533227568336) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_k.lora_B['default_0'], 140533227563968) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_k.base_layer, 140581770780368) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_k.lora_dropout, 140533227560800) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_k.lora_dropout['default_0'], 140533227565600) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[26].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[26].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[26].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[26].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[26].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[26].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[26].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[26].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[26].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
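The to_k subtree above is complete, and from here on the same handful of guard kinds keeps recurring. Rough Python equivalents of those predicates, as illustrative paraphrases only (the real checks run natively inside dynamo's guard evaluator, not as Python functions); note that the two object ids that follow every .training guard, 140591004393408 and 140591004393440, are presumably just the interpreter's False and True singletons:

```python
# Illustrative paraphrases of the guard predicates appearing in this dump.
def id_match(obj, expected_id):          # ID_MATCH / ___check_obj_id
    return id(obj) == expected_id        # pins module identity, True/False flags

def type_match(obj, expected_type_id):   # TYPE_MATCH / ___check_type_id
    return id(type(obj)) == expected_type_id

def equals_match(value, expected):       # EQUALS_MATCH, e.g. scaling['default_0'] == 1.0
    return value == expected

def dict_length(d, n):                   # DICT_LENGTH
    return len(d) == n

def length_check_empty(container):       # LENGTH_CHECK: not <container>
    return not container

def dict_contains(key, d, expected):     # DICT_CONTAINS / ___dict_contains
    return (key in d) == expected

def tensor_aliasing(a, b):               # TENSOR_ALIASING: both sources, one object
    return a is b
```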
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_q, 140533225851200) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[26].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_q.lora_A, 140533225849424) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_q.lora_A['default_0'], 140533227568048) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_q.lora_A['default_0'].weight, 140537322192736) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_q.lora_B, 140533225860512) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_q.lora_B['default_0'], 140533227555616) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_q.base_layer, 140581770780464) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_q.lora_dropout, 140533225854512) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_q.lora_dropout['default_0'], 140533225854272) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[26].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[26].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[26].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[26].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[26].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[26].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[26].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q._disable_adapters,
accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[26].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[26].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_v, 140533227562816) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[26].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_v.lora_A, 140533227559216) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_v.lora_A['default_0'], 140533227559504) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.lora_A['default_0'].weight, 
accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_v.lora_A['default_0'].weight, 140537322185856) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_v.lora_B, 140533227558784) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_v.lora_B['default_0'], 140533227559552) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_v.base_layer, 140581770780512) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_v.lora_dropout, 140533227566992) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_v.lora_dropout['default_0'], 140533227566464) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.lora_dropout['default_0'].training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[26].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[26].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[26].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[26].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[26].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[26].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[26].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[26].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[26].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[26].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.norm_k, 140581770780416) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in 
__call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[26].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[26].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.norm_k.weight, 140581783341632) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_k._backward_pre_hooks, 
accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.norm_q, 140581770780272) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[26].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[26].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.norm_q.weight, 140581773250448) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[26].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[26].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[26].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.processor, 140581770780176) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm, 140581770779840) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[26].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.norm, 140581770779984) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.norm.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.silu, 140581770779888) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.linear, 140533226596016) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[26].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[26].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.linear.lora_A, 140533225862816) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.linear.lora_A['default_0'], 140533225864016) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] 
[0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.linear.lora_A['default_0'].weight, 140537322374304) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.linear.lora_B, 140533225857392) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.linear.lora_B['default_0'], 140533225858880) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.linear.base_layer, 140581770779936) # result = 
self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.linear.lora_dropout, 140533225863440) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.linear.lora_dropout['default_0'], 140533225862384) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[26].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[26].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[26].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[26].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[26].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear._backward_hooks, 
accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[26].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[26].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[26].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[26].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | 
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].act_mlp, 140581770780080) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_mlp, 140533225860128) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[26].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
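Every `.training` guard in this dump resolves to one of two object ids, 140591004393440 or 140591004393408. `training` is a bool, and CPython interns True and False, so ID_MATCH here is an identity check against those two singletons; note that the PEFT wrapper modules (proj_mlp and its lora_* children below) match one id while act_mlp and the base layers match the other, consistent with adapters left in train mode over an otherwise eval'd model. Flipping any module's mode invalidates the cache entry; a hedged, self-contained illustration (not taken from this log):

    import torch

    # True/False are singletons, so comparing id(obj) against a recorded id
    # is enough for ___check_obj_id to guard a bool like module.training.
    m = torch.nn.Linear(4, 4).eval()   # compile-time state: training is False
    fn = torch.compile(m)
    _ = fn(torch.randn(1, 4))          # records ID_MATCH on m.training
    m.train()                          # m.training is now the True singleton
    _ = fn(torch.randn(1, 4))          # guard fails -> recompilation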
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_mlp.lora_A, 140533225852544) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_mlp.lora_A['default_0'], 140533225860032) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
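All of the lora_A/lora_B/lora_dropout guards in this stretch cite the same few lines of peft/tuners/lora/layer.py (557-568), the LoRA Linear forward that Dynamo inlined. A condensed paraphrase of that path (shortened and renamed, not the exact PEFT source) shows why each attribute read above becomes its own guard:

    # Condensed paraphrase of the cited peft/tuners/lora/layer.py:557-568.
    # Every self.<attr> access here is re-checked by a guard in this dump.
    def lora_forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)      # :557
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():  # :560
                continue
            lora_A = self.lora_A[active_adapter]          # :562
            lora_B = self.lora_B[active_adapter]          # :563
            dropout = self.lora_dropout[active_adapter]   # :564
            scaling = self.scaling[active_adapter]        # :565
            x = x.to(lora_A.weight.dtype)                 # :566
            if not self.use_dora[active_adapter]:         # :568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result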
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_mlp.lora_A['default_0'].weight, 140537322189536) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_mlp.lora_B, 140533225855808) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_mlp.lora_B['default_0'], 140533225861952) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_mlp.base_layer, 140581770780032) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_mlp.lora_dropout, 140533225864736) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_mlp.lora_dropout['default_0'], 140533225859552) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[26].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[26].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[26].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[26].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[26].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
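EQUALS_MATCH on scaling['default_0'] == 1.0 means the LoRA scale was burned into the graph as a Python float rather than traced as a tensor. Anything that changes the effective scale, for example passing a different lora scale so that diffusers' scale_lora_layers rescales the adapters, fails this guard and triggers a recompile. A hedged usage sketch (`pipe` and `prompt` are assumed pipeline variables, not taken from this log):

    # First call compiles with scaling == 1.0 baked in; the second rescales
    # the adapters, so the EQUALS_MATCH above fails and Dynamo recompiles.
    img = pipe(prompt, joint_attention_kwargs={"scale": 1.0}).images[0]
    img = pipe(prompt, joint_attention_kwargs={"scale": 0.7}).images[0]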
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[26].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[26].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[26].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_out, 140533225851152) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[26].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
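Despite the name, the TENSOR_ALIASING guards here are identity checks over plain attributes: every PEFT layer's _active_adapter must be the very same object as transformer_blocks[0].norm1.linear._active_adapter, which lets Dynamo guard the shared adapter state once by aliasing rather than re-checking its contents per layer. Identity, not equality, is what is asserted:

    # Aliasing means "is", not "==": equal contents alone would not satisfy it.
    a = ["default_0"]
    b = a                  # an alias: "b is a" holds
    c = ["default_0"]      # equal value, different object
    assert b is a and c == a and not (c is a)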
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_out.lora_A, 140533225855520) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_out.lora_A['default_0'], 140533225850720) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
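The recurring DICT_CONTAINS guard (`not ___dict_contains('forward', ...__dict__)`) comes from nn.Module._call_impl, which prefers an instance-level `forward` over the class method; the guard records that no per-instance override existed at compile time. A short illustration (not taken from this log):

    import torch

    m = torch.nn.Linear(2, 2)
    assert "forward" not in m.__dict__   # the state the guard captured
    m.forward = lambda x: x              # instance attribute shadows the class method
    assert "forward" in m.__dict__       # the same DICT_CONTAINS check would now fail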
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_out.lora_A['default_0'].weight, 140537322180656) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_out.lora_B, 140533225851584) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_out.lora_B['default_0'], 140533225853072) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_out.base_layer, 140581770780128) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_out.lora_dropout, 140533225853648) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_out.lora_dropout['default_0'], 140533225853408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[26].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[26].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[26].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[26].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[26].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[26].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[26].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[26].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
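Dumps like this one come from Dynamo's verbose guard logging and can be regenerated on demand; for a quick health check it is usually easier to ask Dynamo for a summary than to read the raw tree. A sketch, assuming a compiled module `model` and an `example_input` (both placeholder names):

    import torch

    # Either mechanism should enable this guard output on recent PyTorch:
    #   TORCH_LOGS="guards" python script.py     (environment variable)
    torch._logging.set_logs(guards=True)          # programmatic equivalent

    # Summarize graph count and recompile/break reasons instead of reading
    # the full guard tree by hand.
    print(torch._dynamo.explain(model)(example_input))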
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27], accessed_by=GetItemGuardAccessor(27)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27], 140581770779792) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[27].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn, 140581770780992) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[27].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
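The single_transformer_blocks[27] subtree that follows repeats the [26] pattern because the `for index_block, block in enumerate(self.single_transformer_blocks)` loop cited above is unrolled at trace time: each block is a distinct module object, so each gets its own ID_MATCH chain. The two primitive checks behave as their names suggest; a hedged sketch of their semantics (not the actual C++ implementation):

    # ID_MATCH pins an exact object; TYPE_MATCH pins an exact class object.
    def check_obj_id(obj, expected_id: int) -> bool:    # ___check_obj_id
        return id(obj) == expected_id

    def check_type_id(obj, expected_id: int) -> bool:   # ___check_type_id
        return id(type(obj)) == expected_id

Swapping in a new block instance therefore fails that block's ID_MATCH and forces a recompile of the whole frame.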
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_k, 140533230201168) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[27].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_k.lora_A, 140533231235424) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
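The guard comments now point at diffusers/src/diffusers/models/attention_processor.py:1716-1717: inside the attention processor each projection is an ordinary module call, and because to_q and to_k here are PEFT-wrapped Linears, every projection drags in the full LoRA guard block seen above. A condensed paraphrase of the cited lines:

    # Paraphrase of the cited processor lines; attn.to_q / attn.to_k are
    # peft LoRA Linears here, so each call re-enters the guarded LoRA forward.
    query = attn.to_q(hidden_states)   # attention_processor.py:1716
    key = attn.to_k(hidden_states)     # attention_processor.py:1717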
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_k.lora_A['default_0'], 140533231236960) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_k.lora_A['default_0'].weight, 140537322077568) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_k.lora_B, 140533231225968) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_k.lora_B['default_0'], 140533231237536) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_k.base_layer, 140581770781136) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_k.lora_dropout, 140533230200496) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_k.lora_dropout['default_0'], 140533230201600) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[27].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[27].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[27].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[27].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[27].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[27].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[27].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
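to_q follows with the same adapter guard block as to_k, and the pattern repeats for every projection in every block, which is where the bulk of this guard tree comes from. If guard-evaluation or recompile overhead matters, fusing the LoRA deltas into the base weights before compiling removes the PEFT branches, and their guards, from the traced code entirely. A hedged sketch using the diffusers LoRA API (`pipe` is an assumed pipeline variable):

    import torch

    # Merge adapter weights into the base Linears, then compile: Dynamo then
    # traces plain nn.Linear forwards instead of the guarded LoRA path above.
    # (pipe.unfuse_lora() can revert the merge if needed.)
    pipe.fuse_lora()
    pipe.transformer = torch.compile(pipe.transformer)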
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[27].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[27].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_q, 140533230196032) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[27].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_q.lora_A, 140533230198432) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_q.lora_A['default_0'], 140533230200784) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_q.lora_A['default_0'].weight, 140537322064688) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_q.lora_B, 140533230198864) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_q.lora_B['default_0'], 140533230200016) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.base_layer, 
accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_q.base_layer, 140581770781232) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_q.lora_dropout, 140533230195648) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_q.lora_dropout['default_0'], 140533230196752) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | 
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[27].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[27].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[27].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[27].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[27].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q._forward_hooks, 
accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[27].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[27].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[27].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[27].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_v, 140533231224912) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[27].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_v.lora_A, 140533231234224) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_v.lora_A['default_0'], 140533231231152) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_v.lora_A['default_0'].weight, 140537322063728) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_v.lora_B, 140533231232544) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_v.lora_B['default_0'], 140533231227264) # 
lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_v.base_layer, 140581770781280) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_v.lora_dropout, 140533231233744) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_v.lora_dropout['default_0'], 140533231234512) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[27].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[27].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[27].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[27].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # 
peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[27].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[27].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[27].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v._active_adapter, 
accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[27].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[27].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.norm_k, 140581770781184) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[27].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[27].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.norm_k.weight, 140581773250208) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.norm_q, 140581770781040) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[27].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[27].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # 
diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.norm_q.weight, 140581771022304) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[27].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[27].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.processor, 140581770780944) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn._forward_hooks, 
accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm, 140581770780608) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[27].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # 
diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.norm, 140581770780752) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.silu, 140581770780656) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.linear, 140533227558880) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[27].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.linear.lora_A, 140533227553168) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.linear.lora_A['default_0'], 140533227554608) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.linear.lora_A['default_0'].weight, 140537322178336) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.linear.lora_B, 140533227553504) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.linear.lora_B['default_0'], 140533227554416) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.linear.base_layer, 140581770780704) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.linear.lora_dropout, 140533227554272) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.linear.lora_dropout['default_0'], 140533227556624) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[27].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[27].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[27].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[27].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[27].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[27].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[27].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[27].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[27].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].act_mlp, 140581770780848) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_mlp, 140533227562672) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[27].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_mlp.lora_A, 140533230195600) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_mlp.lora_A['default_0'], 140533230199440) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_mlp.lora_A['default_0'].weight, 140537322073648) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_mlp.lora_B, 140533230206832) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_mlp.lora_B['default_0'], 140533230199392) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_mlp.base_layer, 140581770780800) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_mlp.lora_dropout, 140533227567568) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_mlp.lora_dropout['default_0'], 140533227564784) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[27].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[27].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[27].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[27].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[27].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[27].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[27].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[27].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[27].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_out, 140533230195408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[27].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_out.lora_A, 140533230198960) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_out.lora_A['default_0'], 140533230194208) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_out.lora_A['default_0'].weight, 140537322063808) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_out.lora_B, 140533230193440) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_out.lora_B['default_0'], 140533230194160) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_out.base_layer, 140581770780896) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_out.lora_dropout, 140533230196320) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_out.lora_dropout['default_0'], 140533230201456) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[27].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[27].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[27].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[27].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[27].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[27].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[27].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[27].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[27].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28], accessed_by=GetItemGuardAccessor(28)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28], 140581770780560) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[28].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn, accessed_by=DictGetItemGuardAccessor(attn)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn, 140581770781760) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[28].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_k, 140533232469328) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[28].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_k.lora_A, 140533232470576) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_k.lora_A['default_0'], 140533232472352) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_k.lora_A['default_0'].weight, 140537321961440) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_k.lora_B, 140533232468176) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_k.lora_B['default_0'], 140533232474128) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B =
self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_k.base_layer, 140581770781904) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_k.lora_dropout, 140533232469520) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_k.lora_dropout['default_0'], 140533232470816) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | 
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[28].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[28].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[28].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[28].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[28].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # 
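
Most of the guards on to_k above specialize the PEFT LoRA forward path that the trailing comments cite (peft/tuners/lora/layer.py:557-568). Paraphrased from those cited lines rather than copied from peft, the guarded branch has roughly this shape:

    def lora_linear_forward(self, x):
        # layer.py:557 - run the frozen base projection first
        result = self.base_layer(x)
        for active_adapter in self.active_adapters:
            # layer.py:560 - guarded: the adapter key must exist
            if active_adapter not in self.lora_A.keys():
                continue
            lora_A = self.lora_A[active_adapter]         # layer.py:562
            lora_B = self.lora_B[active_adapter]         # layer.py:563
            dropout = self.lora_dropout[active_adapter]  # layer.py:564
            scaling = self.scaling[active_adapter]       # layer.py:565
            x = x.to(lora_A.weight.dtype)                # layer.py:566
            if not self.use_dora[active_adapter]:        # layer.py:568 - guarded False here
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Every attribute read on this path (lora_A, lora_B, scaling, use_dora, the 'default_0' key, the dropout module, the base layer) gets its own guard, which is why the tree repeats the same dozen checks for every projection of every block.
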
peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[28].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[28].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[28].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[28].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- 
GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_q, 140533232473216) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[28].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_q.lora_A, 140533232477536) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | 
| | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_q.lora_A['default_0'], 140533232469424) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_q.lora_A['default_0'].weight, 140537321954320) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_q.lora_B, 140533232477488) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_q.lora_B['default_0'], 140533232468368) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_q.base_layer, 140581770782000) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_q.lora_dropout, 140533232473744) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[28].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_q.lora_dropout['default_0'], 140533232474416) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[28].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[28].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[28].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 
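
The EQUALS_MATCH on scaling['default_0'] == 1.0 pins the adapter's scale factor into the compiled graph as a constant. In peft, a standard (non-rslora) LoRA layer computes this scale as lora_alpha / r, so the guarded value is consistent with a config where lora_alpha equals r; the numbers below are hypothetical, chosen only to reproduce the value in the log:

    lora_alpha, r = 16, 16    # hypothetical; any equal pair yields 1.0
    scaling = lora_alpha / r
    assert scaling == 1.0     # matches the EQUALS_MATCH guard above

Changing the effective scale at runtime (for example by reweighting adapters) would fail this guard and force a recompile.
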
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[28].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[28].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[28].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[28].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q._backward_pre_hooks, 
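
The _forward_hooks, _backward_hooks, _forward_pre_hooks, and _backward_pre_hooks entries get their own GuardManagers because nn.Module._call_impl branches on them, and the compiled code assumes they stay empty. A hedged illustration, where pipe is a hypothetical handle to the pipeline that owns this transformer: registering any hook after compilation flips that state and invalidates the guard set on the next call.

    # Hypothetical: adding a hook to a guarded submodule after torch.compile.
    # The hook dict is no longer empty, so the guards above fail and the
    # model recompiles (or falls back) on the next forward pass.
    attn = pipe.transformer.single_transformer_blocks[28].attn
    handle = attn.to_q.register_forward_hook(lambda mod, args, out: out)
    # Removing the hook restores the empty dict, so the old guards pass again.
    handle.remove()
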
accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[28].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[28].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_v, 140533217500448) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[28].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_v.lora_A, 140533233888816) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_v.lora_A['default_0'], 140533233888048) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_v.lora_A['default_0'].weight, 140537321954160) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_v.lora_B, 140533233889680) # lora_B = 
self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_v.lora_B['default_0'], 140533233892848) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_v.base_layer, 140581770782048) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_v.lora_dropout, 140533233890688) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_v.lora_dropout['default_0'], 140533233886848) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[28].attn.to_v.scaling, 140591004466944) # scaling = 
self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[28].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[28].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[28].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[28].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[28].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not 
L['self'].single_transformer_blocks[28].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[28].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[28].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.norm_k, 140581770781952) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[28].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_k.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[28].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.norm_k.weight, 140581765880688) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.norm_q, 140581770781808) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_q.__dict__, 
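
The eps == 1e-06 EQUALS_MATCH guards belong to the RMSNorm used for query/key normalization, and the trailing comments quote the exact computation (diffusers/src/diffusers/models/normalization.py:428-430). A self-contained sketch of that computation, reconstructed from the quoted lines rather than copied from diffusers:

    import torch

    def rms_norm(hidden_states, weight=None, eps=1e-6):
        # normalization.py:428 - scale by the reciprocal root-mean-square
        variance = hidden_states.pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + eps)
        # normalization.py:430 - optional learned per-channel gain
        if weight is not None:
            hidden_states = hidden_states * weight
        return hidden_states

Because eps is read as a plain Python float in forward, it is burned into the graph as a constant and guarded with EQUALS_MATCH rather than treated as a tensor input.
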
accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[28].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[28].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.norm_q.weight, 140581783345232) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[28].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[28].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[28].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.processor, 140581770781712) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm, 
accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm, 140581770781376) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[28].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.norm, 140581770781520) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[28].norm.silu, 140581770781424) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.linear, 140533231228656) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[28].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.linear.lora_A, 140533231232640) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] 
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.linear.lora_A['default_0'], 140533231224768) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.linear.lora_A['default_0'].weight, 140537322067648) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.linear.lora_B, 140533231224144) # lora_B = 
self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.linear.lora_B['default_0'], 140533231224576) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.linear.base_layer, 140581770781472) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[28].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.linear.lora_dropout, 140533231229568) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.linear.lora_dropout['default_0'], 140533231222896) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[28].norm.linear.scaling, 
140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[28].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[28].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[28].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[28].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[28].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- 
LENGTH_CHECK: not L['self'].single_transformer_blocks[28].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[28].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[28].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[28].act_mlp, 140581770781616) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_mlp, 140533231225536) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[28].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_mlp.lora_A, 140533231229520) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in 
forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_mlp.lora_A['default_0'], 140533232472016) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_mlp.lora_A['default_0'].weight, 140537322063568) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_mlp.lora_B, 140533231231680) # lora_B = self.lora_B[active_adapter] # 
peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_mlp.lora_B['default_0'], 140533232483728) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_mlp.base_layer, 140581770781568) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # 
peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_mlp.lora_dropout, 140533231227792) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_mlp.lora_dropout['default_0'], 140533231228224) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[28].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: 
len(L['self'].single_transformer_blocks[28].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[28].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[28].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[28].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[28].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[28].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[28].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[28].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[28].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_out, 140533232470768) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[28].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # 
diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_out.lora_A, 140533232477920) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_out.lora_A['default_0'], 140533232480656) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.lora_A['default_0'].weight, 
accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_out.lora_A['default_0'].weight, 140537321961840) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_out.lora_B, 140533232480368) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_out.lora_B['default_0'], 140533232472976) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_out.base_layer, 140581770781664) # result = 
self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_out.lora_dropout, 140533232483536) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_out.lora_dropout['default_0'], 140533232483824) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[28].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[28].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[28].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[28].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[28].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[28].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[28].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[28].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[28].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[28].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[28].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[28]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29], accessed_by=GetItemGuardAccessor(29) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29], 140581770781328) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[29].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn, 140581770782528) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[29].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.training, 140591004393440) # attn_output = self.attn( # 
diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_k, 140533234987072) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[29].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_k.lora_A, 140533234988608) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in 
self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_k.lora_A['default_0'], 140533234987648) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_k.lora_A['default_0'].weight, 140537321781936) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_k.lora_B, 140533234988080) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_k.lora_B['default_0'], 140533234976368) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_k.base_layer, 140581770782672) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_k.lora_dropout, 140533234987552) # dropout = self.lora_dropout[active_adapter] # 
peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_k.lora_dropout['default_0'], 140533234985872) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[29].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[29].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | 
+- EQUALS_MATCH: L['self'].single_transformer_blocks[29].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[29].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[29].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[29].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[29].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[29].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[29].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_q, 140533234981600) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[29].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[29].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_q.lora_A, 140533234982080) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_q.lora_A['default_0'], 140533234979728) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_q.lora_A['default_0'].weight, 140537321775216) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_q.lora_B, 140533234986064) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_q.lora_B['default_0'], 140533234988416) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_q.base_layer, 140581770782768) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.base_layer.__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_q.lora_dropout, 140533234988656) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_q.lora_dropout['default_0'], 140533234976992) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[29].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[29].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[29].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[29].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[29].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[29].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[29].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[29].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[29].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_v, 140533234980880) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', 
L['self'].single_transformer_blocks[29].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_v.lora_A, 140533234979920) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_v.lora_A['default_0'], 140533234975360) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_v.lora_A['default_0'].weight, 140537321777696) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_v.lora_B, 140533234983856) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_v.lora_B['default_0'], 140533234975552) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.lora_B['default_0'].training, 
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_v.base_layer, 140581770782816) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_v.lora_dropout, 140533234978624) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_v.lora_dropout['default_0'], 140533234980736) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[29].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[29].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[29].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[29].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[29].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[29].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[29].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[29].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[29].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
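All of the to_v guards trace the same few lines of peft/tuners/lora/layer.py (557-568) that the trailing comments quote. A simplified paraphrase of that forward path, assuming a single active adapter named 'default_0' as in this log (a sketch, not the verbatim PEFT source):

    def lora_linear_forward(self, x):
        result = self.base_layer(x)                       # layer.py:557
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():  # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]          # layer.py:562
            lora_B = self.lora_B[active_adapter]          # layer.py:563
            dropout = self.lora_dropout[active_adapter]   # layer.py:564
            scaling = self.scaling[active_adapter]        # layer.py:565
            x = x.to(lora_A.weight.dtype)                 # layer.py:566
            if not self.use_dora[active_adapter]:         # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Every attribute touched on that path (lora_A, lora_B, base_layer, lora_dropout, scaling, use_dora, merged_adapters, _disable_adapters, _active_adapter) gets its own guard subtree, which is why a single LoRA-wrapped Linear contributes dozens of guards.
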
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.norm_k, 140581770782720) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[29].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[29].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.norm_k.weight, 140581771023984) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.norm_q, 140581770782576) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[29].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[29].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.norm_q.weight, 140581783346352) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[29].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[29].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.processor, 140581770782480) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
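For the query/key RMSNorms only two values are guarded: eps via EQUALS_MATCH (== 1e-06) and the weight object via ID_MATCH. The quoted source comments (normalization.py:428 and :430) are enough to reconstruct the step being compiled; a sketch assembled from those comments (the float32 upcast of the variance is an assumption, not shown in the log):

    import torch

    def rms_norm(hidden_states, weight, eps=1e-6):
        variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + eps)   # :428
        if weight is not None:                                        # :430
            hidden_states = hidden_states.to(weight.dtype) * weight
        return hidden_states
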
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.forward, accessed_by=FuncDefaultsGuardAccessor
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm, 140581770782144) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[29].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.norm, 140581770782288) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.silu, 140581770782192) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.linear, 140533233878256) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[29].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
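Two details worth noting here. First, the FuncDefaultsGuardAccessor guards attn.forward.__defaults__[0]: the ID_MATCH pins the encoder_hidden_states default (very likely the None singleton, given the quoted branch at attention_processor.py:1713). Second, the block's norm is an AdaLayerNorm-style modulation whose silu/linear/norm children are each guarded; the two quoted lines (normalization.py:169 and :171) let us sketch it (the 3-way chunk is an assumption for this single-block variant):

    def ada_layer_norm_zero_single(x, emb, silu, linear, norm):
        emb = linear(silu(emb))                       # normalization.py:169
        shift_msa, scale_msa, gate_msa = emb.chunk(3, dim=1)
        x = norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # :171
        return x, gate_msa
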
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.linear.lora_A, 140533233884544) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.linear.lora_A['default_0'], 140533233890208) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.linear.lora_A['default_0'].weight, 140537321961280) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.linear.lora_B, 140533233886464) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.linear.lora_B['default_0'], 140533233891264) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.linear.base_layer, 140581770782240) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.linear.lora_dropout, 140533233892896) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.linear.lora_dropout['default_0'], 140533233884736) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
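The lora_dropout slot is guarded purely by object identity and training flags; no dropout probability appears anywhere. That is consistent with an inference-time adapter where PEFT registers an nn.Identity in the dropout slot when the LoRA was created with dropout 0.0 (an assumption here, not stated in the log). The guarded containers would then look roughly like this (dimensions are placeholders):

    import torch.nn as nn

    lora_A = nn.ModuleDict({"default_0": nn.Linear(3072, 16, bias=False)})
    lora_B = nn.ModuleDict({"default_0": nn.Linear(16, 3072, bias=False)})
    lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})  # p=0.0 case
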
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[29].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[29].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[29].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[29].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[29].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[29].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[29].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[29].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[29].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
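The TENSOR_ALIASING pair at the bottom is not about tensors here: it asserts that this layer's _active_adapter is literally the same object as transformer_blocks[0].norm1.linear._active_adapter, i.e. one shared active-adapter list (or string) rather than per-layer copies; the guard happens to be recorded twice, as the duplicate line shows. Conceptually:

    shared = ["default_0"]          # illustrative shared active-adapter list
    layer_a_active = shared
    layer_b_active = shared
    assert layer_a_active is layer_b_active   # what the guard re-checks
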
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].act_mlp, 140581770782384) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_mlp, 140533233885120) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[29].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_mlp.lora_A, 140533233884016) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_mlp.lora_A['default_0'], 140533233880800) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_mlp.lora_A['default_0'].weight, 140537321948080) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_mlp.lora_B, 140533233883920) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_mlp.lora_B['default_0'], 140533233880560) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
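act_mlp carries only identity/training guards, while proj_mlp repeats the full LoRA-linear guard subtree seen for to_v. The quoted line (transformer_flux.py:89) is the single-block MLP branch; a sketch with placeholder dimensions and GELU(approximate="tanh") assumed for act_mlp (the activation type is not recoverable from the log):

    import torch
    import torch.nn as nn

    hidden = 3072                              # placeholder width
    proj_mlp = nn.Linear(hidden, 4 * hidden)   # ratio 4 is an assumption
    act_mlp = nn.GELU(approximate="tanh")      # assumed activation

    norm_hidden_states = torch.randn(1, 512, hidden)
    mlp_hidden_states = act_mlp(proj_mlp(norm_hidden_states))   # :89
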
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_mlp.base_layer, 140581770782336) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_mlp.lora_dropout, 140533233881952) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_mlp.lora_dropout['default_0'], 140533233884400) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[29].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[29].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[29].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[29].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[29].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[29].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[29].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[29].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[29].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_out, 140533233878208) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[29].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_out.lora_A, 140533233888384) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_out.lora_A['default_0'], 140533234981456) # lora_A = self.lora_A[active_adapter] # 
peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_out.lora_A['default_0'].weight, 140537321772256) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_out.lora_B, 140533233885408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_out.lora_B['default_0'], 140533234981216) # lora_B = 
self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_out.base_layer, 140581770782432) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_out.lora_dropout, 140533233882144) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_out.lora_dropout.training, 140591004393408) # 
dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_out.lora_dropout['default_0'], 140533233878064) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[29].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[29].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[29].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[29].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: 
len(L['self'].single_transformer_blocks[29].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[29].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[29].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].single_transformer_blocks[29].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[29].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30], accessed_by=GetItemGuardAccessor(30) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30], 140581770782096) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[30].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn, 140581770783296) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[30].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_k, 140533235392208) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[30].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_k.lora_A, 140533235393120) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_k.lora_A['default_0'], 140533235554464) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_k.lora_A['default_0'].weight, 140537321660688) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_k.lora_B, 140533235554512) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_k.lora_B['default_0'], 140533235560512) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_k.base_layer, 140581770783440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_k.lora_dropout, 140533235387456) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_k.lora_dropout['default_0'], 140533235392880) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[30].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[30].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[30].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[30].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[30].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[30].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[30].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[30].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[30].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_q, 140533235386880) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[30].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[30].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_q.lora_A, 140533235388128) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_q.lora_A['default_0'], 140533235389904) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_q.lora_A['default_0'].weight, 140537321660928) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_q.lora_B, 140533235385824) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_q.lora_B['default_0'], 140533235391680) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.lora_B['default_0'].__dict__, 
accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_q.base_layer, 140581770783536) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_q.lora_dropout, 140533235385488) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_q.lora_dropout['default_0'], 140533235388560) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[30].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[30].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[30].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[30].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[30].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[30].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[30].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[30].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
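The block of to_q guards above (and the matching blocks for to_v, norm.linear, and proj_mlp below) all trace one pass through PEFT's LoRA dispatch; the trailing comments quote it line by line. Stitched back together, peft/tuners/lora/layer.py:557-568 reads roughly as follows. The signature, the loop header, the continue, and the final update are paraphrased from PEFT's Linear.forward rather than quoted in this log, so treat the lines marked "assumed" as assumptions:

    # Sketch of peft/tuners/lora/layer.py:557-568 (lora Linear.forward), stitched
    # together from the source lines quoted in the guards above. Lines marked
    # "assumed" are filled in from PEFT by analogy, not quoted in this log.
    def forward(self, x, *args, **kwargs):                      # assumed
        result = self.base_layer(x, *args, **kwargs)            # layer.py:557
        for active_adapter in self.active_adapters:             # assumed
            if active_adapter not in self.lora_A.keys():        # layer.py:560
                continue                                        # assumed
            lora_A = self.lora_A[active_adapter]                # layer.py:562
            lora_B = self.lora_B[active_adapter]                # layer.py:563
            dropout = self.lora_dropout[active_adapter]         # layer.py:564
            scaling = self.scaling[active_adapter]              # layer.py:565
            x = x.to(lora_A.weight.dtype)                       # layer.py:566
            if not self.use_dora[active_adapter]:               # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling  # assumed
        return result                                           # assumed

Every attribute touched on that path (base_layer, lora_A, lora_B, lora_dropout, scaling, use_dora, plus the merged_adapters / _disable_adapters / _active_adapter properties) gets its own guard, which is why the same block of guards repeats for each LoRA-wrapped Linear in the model.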
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_v, 140533235560704) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[30].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_v.lora_A, 140533235560992) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_v.lora_A['default_0'], 140533235557488) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_v.lora_A['default_0'].weight, 140537321667328) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_v.lora_B, 140533235561088) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_v.lora_B['default_0'], 140533235554656) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_v.base_layer, 140581770783584) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_v.lora_dropout, 140533235552016) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_v.lora_dropout['default_0'], 140533235560656) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[30].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[30].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[30].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[30].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[30].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[30].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[30].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[30].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
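For readers decoding the predicates: each guard kind in this dump boils down to a cheap Python check on the cached object graph. A rough mapping, inferred from the guard names and printed arguments (a sketch, not the actual implementation in torch/_dynamo/guards.py, which compiles these into C++ guard managers):

    # Approximate Python meaning of the recurring guard kinds (sketch only):
    def id_match(obj, expected_id):          # ID_MATCH / ___check_obj_id
        return id(obj) == expected_id        # exact same object as at compile time
    def type_match(obj, expected_type_id):   # TYPE_MATCH / ___check_type_id
        return id(type(obj)) == expected_type_id
    def equals_match(value, constant):       # EQUALS_MATCH, e.g. scaling['default_0'] == 1.0
        return value == constant
    def dict_length(d, n):                   # DICT_LENGTH, e.g. len(...scaling) == 1
        return len(d) == n
    def length_check_falsy(seq):             # LENGTH_CHECK in its "not ..." form
        return not seq                       # e.g. merged_adapters stays empty
    def dict_not_contains(d, key):           # DICT_CONTAINS, negated form above
        return key not in d                  # no per-instance 'forward' override
    def same_object(a, b):                   # TENSOR_ALIASING: two sources, one object
        return a is b

If any one of these checks fails on a later call, Dynamo discards this compiled entry for that input and recompiles; the [0/1] tag on these lines appears to mark them as belonging to the second compilation of frame 0. Dumps like this one are typically produced with TORCH_LOGS="guards" (or torch._logging.set_logs(guards=True)).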
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.norm_k, 140581770783488) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[30].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[30].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.norm_k.weight, 140581772499904) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.norm_q, 140581770783344) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[30].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[30].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.norm_q.weight, 140581766117024) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[30].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[30].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.processor, 140581770783248) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.forward, accessed_by=FuncDefaultsGuardAccessor
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
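The attn guards above walk the prologue of the Flux attention processor (diffusers attention_processor.py:1713-1729, quoted in fragments in the trailing comments). Reassembled, with the unquoted projection lines filled in by analogy to the quoted to_v call (marked "assumed"):

    # Sketch of the processor prologue these guards trace (diffusers
    # attention_processor.py:1713-1729). Only lines with a line-number
    # comment are quoted in the log; the rest are assumptions by analogy.
    batch_size, _, _ = (
        hidden_states.shape if encoder_hidden_states is None
        else encoder_hidden_states.shape)               # line 1713
    query = attn.to_q(hidden_states)                    # assumed (cf. line 1718)
    key = attn.to_k(hidden_states)                      # assumed
    value = attn.to_v(hidden_states)                    # line 1718
    inner_dim = key.shape[-1]                           # assumed
    head_dim = inner_dim // attn.heads                  # line 1721; heads == 24 is guarded
    if attn.norm_q is not None:                         # line 1727
        query = attn.norm_q(query)                      # assumed
    if attn.norm_k is not None:                         # line 1729
        key = attn.norm_k(key)                          # assumed

norm_q and norm_k are RMSNorm modules; the EQUALS_MATCH guards pin their eps to the 1e-06 used in hidden_states * torch.rsqrt(variance + self.eps) (normalization.py:428), and the weight ID_MATCH covers the if self.weight is not None branch (normalization.py:430).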
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm, 140581770782912) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[30].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.norm, 140581770783056) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.silu, 140581770782960) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.linear, 140533234976464) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[30].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.linear.lora_A, 140533234978384) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.linear.lora_A['default_0'], 140533234986544) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.linear.lora_A['default_0'].weight, 140537321770976) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.linear.lora_B, 140533234979008) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.linear.lora_B['default_0'], 140533235395088) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.linear.base_layer, 140581770783008) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.linear.lora_dropout, 140533234975744) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.linear.lora_dropout['default_0'], 140533234977904) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[30].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[30].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[30].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[30].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[30].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[30].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[30].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[30].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_mlp, 140533235399456) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[30].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_mlp.lora_A, 140533235399936) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[30].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_mlp.lora_A['default_0'], 140533235392736) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_mlp.lora_A['default_0'].weight, 140537321653568) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_mlp.lora_B, 140533235398496) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | 
| +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_mlp.lora_B['default_0'], 140533235392496) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_mlp.base_layer, 140581770783104) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_mlp.lora_dropout, 140533235398592) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_mlp.lora_dropout['default_0'], 140533235397536) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[30].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[30].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[30].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in 
forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[30].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[30].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[30].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[30].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[30].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[30].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_out, 140533235388800) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[30].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_out.lora_A, 140533235394800) # if active_adapter not 
in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_out.lora_A['default_0'], 140533235386976) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_out.lora_A['default_0'].weight, 140537321666528) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_out.lora_B, 140533235392304) # 
lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_out.lora_B['default_0'], 140533235385872) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_out.base_layer, 140581770783200) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_out.base_layer.training, 140591004393440) # result = 
self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_out.lora_dropout, 140533235388176) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_out.lora_dropout['default_0'], 140533235386928) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[30].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: 
len(L['self'].single_transformer_blocks[30].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[30].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[30].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[30].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[30].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[30].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[30].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[30].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[30].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31], accessed_by=GetItemGuardAccessor(31) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31], 140581770782864) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[31].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn, 140581770784064) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[31].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_k, 140533235850192) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ 
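The guards above have descended into single_transformer_blocks[31].attn and are repeating, for to_k (and next for to_q), the same per-attribute pattern already dumped for blocks 0 through 30: an ID_MATCH pinning the module object, a DICT_CONTAINS check that 'forward' is not shadowed in __dict__, an ID_MATCH on .training, and then one guard for every peft attribute the traced forward touched (the lora_A/lora_B/lora_dropout ModuleDicts and their 'default_0' entries, the scaling and use_dora dicts, merged_adapters, _disable_adapters, _active_adapter). The sketch below is a hypothetical, simplified stand-in for peft's LoRA Linear, reconstructed only from the attribute names and source lines quoted in these guards (peft/tuners/lora/layer.py:557-570); it is not the real peft implementation, but it shows why each of those attributes becomes a guard: Dynamo records a guard for every Python attribute and dict lookup the forward performs.

    import torch
    import torch.nn as nn

    class LoraLinearSketch(nn.Module):
        # Hypothetical stand-in for peft's lora.Linear; attribute names mirror
        # the guard sources above, the implementation itself is an assumption.
        def __init__(self, base_layer: nn.Linear, r: int = 16, adapter: str = "default_0"):
            super().__init__()
            self.base_layer = base_layer  # ID_MATCH pins this exact module object
            self.lora_A = nn.ModuleDict({adapter: nn.Linear(base_layer.in_features, r, bias=False)})
            self.lora_B = nn.ModuleDict({adapter: nn.Linear(r, base_layer.out_features, bias=False)})
            self.lora_dropout = nn.ModuleDict({adapter: nn.Identity()})  # Identity when dropout == 0
            self.scaling = {adapter: 1.0}      # EQUALS_MATCH: scaling['default_0'] == 1.0
            self.use_dora = {adapter: False}   # TYPE_MATCH + DICT_LENGTH + ID_MATCH on the bool
            self.merged_adapters = []          # LENGTH_CHECK: not merged_adapters
            self._disable_adapters = False     # ID_MATCH on the bool singleton
            self._active_adapter = [adapter]   # one list shared by every layer (the TENSOR_ALIASING guards)

        def forward(self, x: torch.Tensor) -> torch.Tensor:
            # Mirrors the source lines quoted in the guards (peft/tuners/lora/layer.py:557-570).
            result = self.base_layer(x)
            for active_adapter in self._active_adapter:
                if active_adapter not in self.lora_A.keys():
                    continue
                lora_A = self.lora_A[active_adapter]
                lora_B = self.lora_B[active_adapter]
                dropout = self.lora_dropout[active_adapter]
                scaling = self.scaling[active_adapter]
                x = x.to(lora_A.weight.dtype)
                if not self.use_dora[active_adapter]:
                    result = result + lora_B(lora_A(dropout(x))) * scaling
            return result

Every attribute read in that forward is one guard record in the dump; with dozens of transformer blocks and several LoRA-wrapped projections per block, the pattern multiplies into the thousands of guard lines seen here. The raw dump resumes below with the to_k.__dict__ guards.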
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[31].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_k.lora_A, 140533235843424) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_k.lora_A['default_0'], 140533235847648) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[31].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_k.lora_A['default_0'].weight, 140537321533216) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_k.lora_B, 140533235847696) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_k.lora_B['default_0'], 140533235845248) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_k.base_layer, 140581770784208) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_k.lora_dropout, 140533235844960) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_k.lora_dropout.training, 140591004393408) # 
dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_k.lora_dropout['default_0'], 140533235850576) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[31].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[31].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[31].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[31].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] 
[0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[31].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[31].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[31].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[31].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[31].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_q, 140533235851104) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[31].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_q.lora_A, 140533235856048) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[31].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_q.lora_A['default_0'], 140533235852448) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_q.lora_A['default_0'].weight, 140537321535216) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_q.lora_B, 140533235850432) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_q.lora_B['default_0'], 140533235853024) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_q.base_layer, 140581770784304) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[31].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_q.lora_dropout, 140533235848224) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_q.lora_dropout['default_0'], 140533235854704) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[31].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[31].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # 
peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[31].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[31].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[31].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[31].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[31].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q._disable_adapters, 
accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[31].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[31].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_v, 140533235850048) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[31].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_v.lora_A, 140533235856096) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_v.lora_A['default_0'], 140533235846976) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.lora_A['default_0'].weight, 
accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_v.lora_A['default_0'].weight, 140537321526416) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_v.lora_B, 140533235854416) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_v.lora_B['default_0'], 140533235847024) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_v.base_layer, 140581770784352) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_v.lora_dropout, 140533235849040) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_v.lora_dropout['default_0'], 140533235852400) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.lora_dropout['default_0'].training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[31].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[31].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[31].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[31].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[31].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[31].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[31].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[31].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[31].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[31].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.norm_k, 140581770784256) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in 
__call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[31].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[31].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.norm_k.weight, 140581783347232) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_k._backward_pre_hooks, 
accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.norm_q, 140581770784112) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[31].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[31].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.norm_q.weight, 140581766104784) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[31].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[31].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[31].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.processor, 140581770784016) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm, 140581770783680) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[31].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.norm, 140581770783824) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.norm.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.silu, 140581770783728) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.linear, 140533235557872) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[31].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[31].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.linear.lora_A, 140533235557296) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.linear.lora_A['default_0'], 140533235550816) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] 
[0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.linear.lora_A['default_0'].weight, 140537321665968) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.linear.lora_B, 140533235557584) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.linear.lora_B['default_0'], 140533235549328) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.linear.base_layer, 140581770783776) # result = 
self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.linear.lora_dropout, 140533235556432) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.linear.lora_dropout['default_0'], 140533235555184) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[31].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[31].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[31].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[31].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[31].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear._backward_hooks, 
accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[31].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[31].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[31].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[31].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | 
| | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].act_mlp, 140581770783920) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_mlp, 140533235555424) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[31].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 
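
The lora_A / lora_B / lora_dropout / scaling / use_dora / merged_adapters clusters above (norm.linear) and below (proj_mlp, proj_out, attn.to_k) all trace one code path: peft's LoRA Linear.forward. A minimal runnable sketch of that path, reconstructed from the source comments quoted in the guards (peft/tuners/lora/layer.py:557-568) — illustrative class and names, not the verbatim peft source:

    import torch
    import torch.nn as nn

    class LoraLinearSketch(nn.Module):
        # Hypothetical stand-in for peft's lora.Linear, shaped after the
        # source lines quoted in the guard comments (layer.py:557-568).
        def __init__(self, base: nn.Linear, r: int = 16):
            super().__init__()
            self.base_layer = base
            self.active_adapters = ['default_0']
            self.lora_A = nn.ModuleDict({'default_0': nn.Linear(base.in_features, r, bias=False)})
            self.lora_B = nn.ModuleDict({'default_0': nn.Linear(r, base.out_features, bias=False)})
            self.lora_dropout = nn.ModuleDict({'default_0': nn.Identity()})
            self.scaling = {'default_0': 1.0}      # guarded above by EQUALS_MATCH == 1.0
            self.use_dora = {'default_0': False}   # guarded above by ID_MATCH on the bool

        def forward(self, x, *args, **kwargs):
            result = self.base_layer(x, *args, **kwargs)        # :557
            for active_adapter in self.active_adapters:
                if active_adapter not in self.lora_A.keys():    # :560
                    continue
                lora_A = self.lora_A[active_adapter]            # :562
                lora_B = self.lora_B[active_adapter]            # :563
                dropout = self.lora_dropout[active_adapter]     # :564
                scaling = self.scaling[active_adapter]          # :565
                x = x.to(lora_A.weight.dtype)                   # :566
                if not self.use_dora[active_adapter]:           # :568 (DoRA path omitted)
                    result = result + lora_B(lora_A(dropout(x))) * scaling
            return result

Every attribute read during tracing becomes a guard, which is why each dict lookup above shows up as its own ID_MATCH / EQUALS_MATCH entry on lora_A['default_0'], scaling['default_0'] == 1.0, and use_dora['default_0']: changing the active adapter, the LoRA scale, or merging the adapter invalidates the graph and forces a recompile.
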
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_mlp.lora_A, 140533235553888) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_mlp.lora_A['default_0'], 140533235552976) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_mlp.lora_A['default_0'].weight, 140537321658528) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_mlp.lora_B, 140533235548224) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_mlp.lora_B['default_0'], 140533235555904) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_mlp.base_layer, 140581770783872) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_mlp.lora_dropout, 140533235553792) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_mlp.lora_dropout['default_0'], 140533235554704) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # 
peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[31].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[31].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[31].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[31].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[31].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[31].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[31].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[31].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[31].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_out, 140533232480272) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[31].proj_out.__dict__) # forward_call = 
(self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_out.lora_A, 140533235855856) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_out.lora_A['default_0'], 140533235848512) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[31].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_out.lora_A['default_0'].weight, 140537321656368) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_out.lora_B, 140533235848608) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_out.lora_B['default_0'], 140533235859072) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_out.base_layer, 140581770783968) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_out.lora_dropout, 140533235853168) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_out.lora_dropout['default_0'], 140533235849712) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[31].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[31].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[31].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[31].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[31].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[31].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[31].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[31].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[31].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 
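
With single_transformer_blocks[31] fully guarded, the guard kinds that repeat through this dump reduce to a handful of cheap predicates evaluated before every compiled call. A rough Python rendering of the proj_out subtree above, assuming L is the frame's locals dict as in the dump (the id constants are the values recorded here; the real checks run in the C++ TREE_GUARD_MANAGER accessors, not Python):

    def guards_still_hold(L) -> bool:
        blk = L['self'].single_transformer_blocks[31]
        return (
            id(blk.proj_out.use_dora['default_0']) == 140591004393440  # ID_MATCH: object identity
            and type(blk.proj_out.scaling) is dict                     # TYPE_MATCH: exact type id
            and len(blk.proj_out.scaling) == 1                         # DICT_LENGTH
            and blk.proj_out.scaling['default_0'] == 1.0               # EQUALS_MATCH: value equality
            and not blk.proj_out.merged_adapters                       # LENGTH_CHECK: still unmerged
            and 'forward' not in blk.proj_out.__dict__                 # DICT_CONTAINS, negated:
                                                                       #   no per-instance forward override
            and (L['self'].transformer_blocks[0].norm1.linear._active_adapter
                 is blk.proj_out._active_adapter)                      # TENSOR_ALIASING: same object
        )

A single False anywhere in the tree invalidates this [0/1] graph and sends the frame back to Dynamo for another compile.
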
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32], accessed_by=GetItemGuardAccessor(32)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32], 140581770783632) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[32].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn, 140581770784832) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[32].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_k, 140533236919136) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[32].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_k.lora_A, 140533236910016) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_k.lora_A['default_0'], 140533236921152) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_k.lora_A['default_0'].weight, 140537323486272) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_k.lora_B, 140533236922160) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_k.lora_B['default_0'], 140533236920432) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_k.base_layer, 140581770784976) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_k.lora_dropout, 140533236920576) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_k.lora_dropout['default_0'], 140533236921104) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[32].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[32].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[32].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[32].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[32].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[32].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[32].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[32].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[32].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_q, 140533236777984) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[32].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_q.lora_A, 140533236781200) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_q.lora_A['default_0'], 140533236923120) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_q.lora_A['default_0'].weight, 140537323485392) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_q.lora_B, 140533236782928) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_q.lora_B['default_0'], 140533236913280) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.base_layer, 
accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_q.base_layer, 140581770785072) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_q.lora_dropout, 140533236782112) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_q.lora_dropout['default_0'], 140533236781104) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | 
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[32].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[32].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[32].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[32].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[32].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q._forward_hooks, 
accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[32].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[32].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[32].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[32].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_v, 140533236922736) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[32].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_v.lora_A, 140533236921776) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_v.lora_A['default_0'], 140533236913664) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_v.lora_A['default_0'].weight, 140537323484832) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_v.lora_B, 140533236914528) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_v.lora_B['default_0'], 140533236917360) # 
lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_v.base_layer, 140581770785120) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_v.lora_dropout, 140533236923216) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_v.lora_dropout['default_0'], 140533236923264) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[32].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[32].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[32].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[32].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # 
peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[32].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[32].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[32].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[32].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[32].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.norm_k, 140581770785024) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[32].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[32].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.norm_k.weight, 140581773255808) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.norm_q, 140581770784880) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[32].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[32].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
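Each entry above compiles down to a small predicate evaluated before a cached graph is reused. A minimal Python sketch of what the guard kinds in this dump check, assuming the helper names stand in for Dynamo's internal ___check_obj_id/___check_type_id/___dict_contains (which compare against values recorded at trace time):

    # Hedged sketch of the guard predicates; the real logic lives in torch/_dynamo/guards.py.
    def id_match(obj, expected_id):            # ID_MATCH: exact same Python object
        return id(obj) == expected_id

    def type_match(obj, expected_type_id):     # TYPE_MATCH: exact type, no subclasses
        return id(type(obj)) == expected_type_id

    def equals_match(value, expected):         # EQUALS_MATCH: value equality (e.g. eps == 1e-06)
        return value == expected

    def dict_length(d, n):                     # DICT_LENGTH: adapter dict size pinned
        return len(d) == n

    def length_check_falsy(seq):               # LENGTH_CHECK: e.g. `not merged_adapters`
        return not seq

    def dict_contains(d, key, expected=False): # DICT_CONTAINS: e.g. no per-instance 'forward'
        return (key in d) == expected

    def aliasing(a, b):                        # TENSOR_ALIASING: both sources are one object
        return a is b

If any predicate fails on a later call, for example a different bool object landing in use_dora['default_0'], Dynamo discards the cached entry for this frame and recompiles.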
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.norm_q.weight, 140581783349712) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[32].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[32].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.processor, 140581770784784) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.forward, accessed_by=FuncDefaultsGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm, accessed_by=DictGetItemGuardAccessor(norm)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm, 140581770784448) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[32].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.norm, 140581770784592) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.silu, 140581770784496) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.linear, 140533235847408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
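The eps and weight guards on attn.norm_q/attn.norm_k come from the RMSNorm forward quoted in the guard comments (normalization.py:428-430). A rough sketch of that computation, reconstructed only from the quoted lines (dtype handling and other details of diffusers' actual RMSNorm are omitted):

    import torch

    def rms_norm(hidden_states, weight=None, eps=1e-06):
        # variance over the channel dimension, then the guarded rsqrt line
        variance = hidden_states.pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + eps)  # normalization.py:428
        if weight is not None:                                       # normalization.py:430
            hidden_states = hidden_states * weight
        return hidden_states

Because eps is pinned with EQUALS_MATCH, 1e-06 is baked into the compiled kernel as a constant; likewise attn.heads == 24 fixes head_dim = inner_dim // 24 at compile time rather than leaving it dynamic.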
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[32].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.linear.lora_A, 140533235853456) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.linear.lora_A['default_0'], 140533236788688) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.linear.lora_A['default_0'].weight, 140537321532896) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.linear.lora_B, 140533236789072) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.linear.lora_B['default_0'], 140533236787584) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.linear.base_layer, 140581770784544) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.linear.lora_dropout, 140533235847504) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
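The lora_A/lora_B/base_layer/lora_dropout guards on norm.linear all trace the branchy peft Linear.forward whose lines are quoted in the comments (peft/tuners/lora/layer.py:557-568). A condensed sketch of that path, assuming a single active adapter and the non-DoRA branch; the final `result = result + ...` update is inferred from peft's usual LoRA formulation rather than quoted in this log:

    def lora_linear_forward(self, x):
        result = self.base_layer(x)                       # layer.py:557, frozen base projection
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():  # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]          # layer.py:562
            lora_B = self.lora_B[active_adapter]          # layer.py:563
            dropout = self.lora_dropout[active_adapter]   # layer.py:564
            scaling = self.scaling[active_adapter]        # layer.py:565
            x = x.to(lora_A.weight.dtype)                 # layer.py:566
            if not self.use_dora[active_adapter]:         # layer.py:568
                # inferred low-rank update: B(A(x)), scaled by alpha/r
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Every dict lookup and bool read in this function becomes its own guard, which is why each LoRA-wrapped Linear in the transformer contributes an identical lora_A/lora_B/dropout/scaling/use_dora subtree to the dump.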
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.linear.lora_dropout['default_0'], 140533235847360) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[32].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[32].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[32].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[32].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[32].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[32].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[32].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[32].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[32].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].act_mlp, 140581770784688) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_mlp, 140533236792336) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
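The norm/act_mlp/proj_mlp/proj_out guards all point into the single-block forward quoted in the comments (transformer_flux.py:88-98 and normalization.py:169-171). Stitching only the quoted lines together gives roughly the control flow below; the chunking of the modulation output, the attention call, the concat, and the residual are filled in as assumptions:

    import torch

    def ada_layer_norm_zero_single(self, x, emb):
        emb = self.linear(self.silu(emb))                 # normalization.py:169
        # assumed: modulation vector split into shift/scale/gate
        shift_msa, scale_msa, gate_msa = emb.chunk(3, dim=1)
        x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # normalization.py:171
        return x, gate_msa

    def single_block_forward(self, hidden_states, temb):
        residual = hidden_states                                          # assumed
        norm_hidden_states, gate = self.norm(hidden_states, emb=temb)     # transformer_flux.py:88
        mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states))  # transformer_flux.py:89
        attn_output = self.attn(hidden_states=norm_hidden_states)         # assumed call signature
        hidden_states = torch.cat([attn_output, mlp_hidden_states], dim=2)   # assumed concat
        hidden_states = gate.unsqueeze(1) * self.proj_out(hidden_states)  # transformer_flux.py:98
        return residual + hidden_states                                   # assumed residual add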
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[32].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_mlp.lora_A, 140533236787968) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_mlp.lora_A['default_0'], 140533236784752) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_mlp.lora_A['default_0'].weight, 140537321534416) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_mlp.lora_B, 140533236784176) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_mlp.lora_B['default_0'], 140533236780528) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_mlp.base_layer, 140581770784640) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_mlp.lora_dropout, 140533236793104) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
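For reference, dumps like this one come from Dynamo's guard logging, which on recent PyTorch builds can be switched on from the environment or in code. A sketch; `transformer` and `hidden_states` are placeholders for the compiled module and its input:

    # TORCH_LOGS="guards" python run.py   # environment-variable form
    import torch

    torch._logging.set_logs(guards=True)    # programmatic form
    compiled = torch.compile(transformer)   # placeholder module
    out = compiled(hidden_states)           # the guard tree prints after the first compile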
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_mlp.lora_dropout['default_0'], 140533236791808) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[32].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[32].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[32].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[32].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[32].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[32].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[32].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[32].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[32].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_out, 140533236784224) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[32].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_out.lora_A, 140533236779856) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
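Guards like EQUALS_MATCH scaling['default_0'] == 1.0 and the ID_MATCH checks on adapter weights mean that changing the LoRA scale, swapping adapters, or reloading weights invalidates the cached graph on the next call. When diagnosing that, logging the failing guard directly is usually more useful than rereading this tree; a sketch (only torch._logging.set_logs and torch.compile are real API here, the rest are placeholders):

    import torch

    # print the reason for every recompilation, i.e. which guard failed
    torch._logging.set_logs(recompiles=True)

    compiled = torch.compile(transformer)   # placeholder module
    _ = compiled(hidden_states)             # first compile; scaling == 1.0 recorded
    # e.g. setting a different adapter scale afterwards flips the EQUALS_MATCH
    # guard above and the next call recompiles, with the failure reason logged.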
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_out.lora_A['default_0'], 140533236780192) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_out.lora_A['default_0'].weight, 140537321530176) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_out.lora_B, 140533236777216) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_out.lora_B['default_0'], 140533236779376) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_out.base_layer, 140581770784736) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_out.lora_dropout, 140533236786720) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_out.lora_dropout['default_0'], 140533236786336) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[32].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[32].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[32].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in
forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[32].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[32].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[32].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[32].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[32].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[32].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33], accessed_by=GetItemGuardAccessor(33) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33], 140581770784400) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[33].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[33].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn, 140581770785600) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[33].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_k, 140533238090240) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[33].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_k.lora_A, 140533238090864) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_k.lora_A['default_0'], 140533237344208) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_k.lora_A['default_0'].weight, 140537322593856) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_k.lora_B, 140533238090288) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_k.lora_B['default_0'], 140533237349344) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = 
self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_k.base_layer, 140581770785744) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_k.lora_dropout, 140533238089568) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_k.lora_dropout['default_0'], 140533238091248) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | 
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[33].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[33].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[33].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[33].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[33].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # 
peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[33].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[33].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[33].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[33].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- 
GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_q, 140533238093600) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[33].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_q.lora_A, 140533238091968) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | 
| | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_q.lora_A['default_0'], 140533238088752) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_q.lora_A['default_0'].weight, 140537322596576) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_q.lora_B, 140533238092544) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_q.lora_B['default_0'], 140533238089328) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_q.base_layer, 140581770785840) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_q.lora_dropout, 140533238093024) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[33].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_q.lora_dropout['default_0'], 140533238094416) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[33].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[33].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[33].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[33].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[33].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[33].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[33].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q._backward_pre_hooks, 
accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[33].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[33].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_v, 140533237343872) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[33].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_v.lora_A, 140533237346896) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_v.lora_A['default_0'], 140533237342768) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_v.lora_A['default_0'].weight, 140537322602416) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_v.lora_B, 140533237347664) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_v.lora_B['default_0'], 140533237344880) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_v.base_layer, 140581770785888) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_v.lora_dropout, 140533237345264) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_v.lora_dropout['default_0'], 140533237343824) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[33].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[33].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[33].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[33].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[33].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[33].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[33].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[33].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[33].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.norm_k, 140581770785792) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[33].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[33].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.norm_k.weight, 140581766107504) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.norm_q, 140581770785648) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[33].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[33].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.norm_q.weight, 140581773258288) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[33].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[33].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.processor, 140581770785552) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.forward, accessed_by=FuncDefaultsGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm, accessed_by=DictGetItemGuardAccessor(norm)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm, 140581770785216) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[33].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.norm, 140581770785360) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.silu, 140581770785264) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.linear, 140533236911408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[33].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.linear.lora_A, 140533236908768) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.linear.lora_A['default_0'], 140533236914336) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.linear.lora_A['default_0'].weight, 140537323484592) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.linear.lora_B, 140533236909104) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.linear.lora_B['default_0'], 140533236912368) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.linear.base_layer, 140581770785312) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.linear.lora_dropout, 140533236916832) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.linear.lora_dropout['default_0'], 140533236913472) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[33].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[33].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[33].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[33].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[33].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[33].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[33].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[33].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[33].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].act_mlp, 140581770785456) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_mlp, 140533236910064) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[33].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_mlp.lora_A, 140533236915344) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_mlp.lora_A['default_0'], 140533238102960) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_mlp.lora_A['default_0'].weight, 140537323479872) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_mlp.lora_B, 140533238097968) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_mlp.lora_B['default_0'], 140533238103440) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_mlp.base_layer, 140581770785408) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_mlp.lora_dropout, 140533236912032) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_mlp.lora_dropout['default_0'], 140533236910112) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[33].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[33].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[33].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[33].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[33].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[33].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[33].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager:
source=L['self'].single_transformer_blocks[33].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[33].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[33].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_out, 140533238100176) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[33].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # 
diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_out.lora_A, 140533238100512) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_out.lora_A['default_0'], 140533238094560) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.lora_A['default_0'].weight, 
accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_out.lora_A['default_0'].weight, 140537323474752) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_out.lora_B, 140533238100752) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_out.lora_B['default_0'], 140533238095376) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_out.base_layer, 140581770785504) # result = 
self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_out.lora_dropout, 140533238095328) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_out.lora_dropout['default_0'], 140533238097920) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[33].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[33].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[33].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[33].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[33].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[33].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[33].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[33].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[33].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[33].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[33].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[33]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34], accessed_by=GetItemGuardAccessor(34) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34], 140581770785168) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[34].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn, 140581770786368) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[34].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.training, 140591004393440) # attn_output = self.attn( # 
diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_k, 140533242132224) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[34].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_k.lora_A, 140533242131936) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in 
self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_k.lora_A['default_0'], 140533242126272) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_k.lora_A['default_0'].weight, 140537324557776) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_k.lora_B, 140533242119840) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_k.lora_B['default_0'], 140533242124016) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_k.base_layer, 140581770786512) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_k.lora_dropout, 140533242126896) # dropout = self.lora_dropout[active_adapter] # 
peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_k.lora_dropout['default_0'], 140533242122048) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[34].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[34].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | 
+- EQUALS_MATCH: L['self'].single_transformer_blocks[34].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[34].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[34].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[34].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[34].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[34].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[34].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_q, 140533242130064) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[34].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[34].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_q.lora_A, 140533242132752) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_q.lora_A['default_0'], 140533242126944) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_q.lora_A['default_0'].weight, 140537324559936) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_q.lora_B, 140533242134144) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_q.lora_B['default_0'], 140533242132512) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_q.base_layer, 140581770786608) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_q.lora_dropout, 140533242128576) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_q.lora_dropout['default_0'], 140533242123248) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[34].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[34].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[34].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[34].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[34].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[34].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[34].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[34].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
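Up to here the subtree for attn.to_q mirrors, guard for guard, the adapter lookups in peft's LoRA Linear.forward: every attribute and dictionary access quoted in the trailing comments (peft/tuners/lora/layer.py:557-568) becomes an ID_MATCH, TYPE_MATCH, or EQUALS_MATCH leaf. A hedged sketch of that control flow, reconstructed only from the source lines quoted above; the final combine step is an assumption and this is not the verbatim peft code:

```python
# Sketch of the guarded control flow (adapter key 'default_0',
# DoRA disabled and no merged adapters, as the guards above pin it).
def lora_linear_forward(layer, x, *args, **kwargs):
    result = layer.base_layer(x, *args, **kwargs)     # layer.py:557
    for active_adapter in layer.active_adapters:
        if active_adapter not in layer.lora_A.keys(): # layer.py:560 -> ID_MATCH on lora_A
            continue
        lora_A = layer.lora_A[active_adapter]         # layer.py:562
        lora_B = layer.lora_B[active_adapter]         # layer.py:563
        dropout = layer.lora_dropout[active_adapter]  # layer.py:564
        scaling = layer.scaling[active_adapter]       # layer.py:565 -> EQUALS_MATCH == 1.0
        x = x.to(lora_A.weight.dtype)                 # layer.py:566 -> ID_MATCH on weight
        if not layer.use_dora[active_adapter]:        # layer.py:568 -> ID_MATCH on a bool
            # assumed combine step; not quoted in the guard comments
            result = result + lora_B(lora_A(dropout(x))) * scaling
    return result
```

Because scaling['default_0'] is a plain float in a plain dict, Dynamo burns it in with TYPE_MATCH + DICT_LENGTH + EQUALS_MATCH rather than an identity check, so changing the LoRA scale invalidates this graph and forces a recompile.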
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_v, 140533242130016) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[34].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_v.lora_A, 140533242118640) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_v.lora_A['default_0'], 140533242120560) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_v.lora_A['default_0'].weight, 140537324558656) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_v.lora_B, 140533242119408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_v.lora_B['default_0'], 140533242121904) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_v.base_layer, 140581770786656) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_v.lora_dropout, 140533242129632) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_v.lora_dropout['default_0'], 140533242128240) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[34].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[34].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[34].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[34].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[34].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[34].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[34].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[34].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
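The same leaf kinds recur throughout the dump, and their printed expressions already give away their semantics. A rough Python illustration (a simplification by assumption; the actual helpers are installed by torch._dynamo into the guard's evaluation scope, and under TREE_GUARD_MANAGER they are evaluated natively rather than as Python):

```python
def check_obj_id(obj, expected):      # ID_MATCH: ___check_obj_id(...)
    return id(obj) == expected        # identity, not equality

def check_type_id(obj, expected):     # TYPE_MATCH: ___check_type_id(...)
    return id(type(obj)) == expected  # e.g. the plain dict behind .scaling

def equals_match(value, expected):    # EQUALS_MATCH
    return value == expected          # e.g. scaling['default_0'] == 1.0

def tensor_aliasing(a, b):            # TENSOR_ALIASING
    return a is b                     # every layer shares one _active_adapter object
```

The two object ids that keep appearing behind `.training` guards (140591004393408 and 140591004393440) plausibly belong to the interpreter's False and True singletons, which would explain why flipping any of these modules between train() and eval() invalidates the compiled graph.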
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.norm_k, 140581770786560) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[34].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[34].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.norm_k.weight, 140581783350592) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.norm_q, 140581770786416) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[34].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[34].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.norm_q.weight, 140581766108944) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
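The norm_k/norm_q subtrees pin the RMSNorm hyperparameters rather than adapter state: an EQUALS_MATCH on eps == 1e-06 and an ID_MATCH on the affine weight, matching the two source lines quoted from diffusers/src/diffusers/models/normalization.py. A sketch of that forward; only lines 428 and 430 are quoted in the guard comments, so the variance computation is an assumption (standard RMS normalization):

```python
import torch

def rms_norm_forward(hidden_states, weight=None, eps=1e-6):
    variance = hidden_states.pow(2).mean(-1, keepdim=True)       # assumed
    hidden_states = hidden_states * torch.rsqrt(variance + eps)  # normalization.py:428
    if weight is not None:                                       # normalization.py:430
        hidden_states = hidden_states * weight
    return hidden_states
```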
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[34].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[34].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.processor, 140581770786320) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.forward, accessed_by=FuncDefaultsGuardAccessor
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
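Note the FuncDefaultsGuardAccessor entry closing the attn subtree: because attention_processor.py:1713 branches on whether encoder_hidden_states was passed, Dynamo also guards the default value of attn.forward itself. A minimal illustration of the Python mechanism it reads (hypothetical signature; the real function is the diffusers Attention.forward):

```python
def forward(hidden_states, encoder_hidden_states=None):
    # attention_processor.py:1713 picks the shape source based on this default
    return hidden_states if encoder_hidden_states is None else encoder_hidden_states

print(forward.__defaults__)  # (None,) -- the guard ID-checks element 0
```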
| | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.norm, 140581770786128) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.silu, 140581770786032) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.linear, 140533237346368) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[34].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.linear.lora_A, 140533237346656) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.linear.lora_A['default_0'], 140533237337824) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = 
self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.linear.lora_A['default_0'].weight, 140537322598096) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.linear.lora_B, 140533237342576) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.linear.lora_B['default_0'], 140533237337584) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[34].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.linear.base_layer, 140581770786080) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.linear.lora_dropout, 140533237343632) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.linear.lora_dropout['default_0'], 140533237345600) # dropout = 
self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[34].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[34].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[34].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[34].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[34].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[34].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[34].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[34].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[34].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
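Every LoRA-wrapped Linear in this tree produces the same cluster of guards (ID_MATCH on the submodules, TYPE_MATCH/DICT_LENGTH on the scaling and use_dora dicts, EQUALS_MATCH pinning scaling['default_0'] to 1.0) because Dynamo traces the same peft forward for each of them. The sketch below reconstructs that path from the source lines the guards cite (peft/tuners/lora/layer.py:557-568); the class scaffolding around it is assumed for illustration and is not peft's actual class.

    import torch
    import torch.nn as nn

    class LoraLinearSketch(nn.Module):
        # Minimal stand-in for peft's lora.Linear, assembled from the lines
        # quoted in the guard comments above; the adapter name "default_0"
        # mirrors the log, everything else (r, Identity dropout) is assumed.
        def __init__(self, base_layer: nn.Linear, r: int = 16):
            super().__init__()
            self.base_layer = base_layer
            self.lora_A = nn.ModuleDict({"default_0": nn.Linear(base_layer.in_features, r, bias=False)})
            self.lora_B = nn.ModuleDict({"default_0": nn.Linear(r, base_layer.out_features, bias=False)})
            self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})
            self.scaling = {"default_0": 1.0}     # guarded via EQUALS_MATCH == 1.0
            self.use_dora = {"default_0": False}  # guarded via TYPE_MATCH + DICT_LENGTH
            self.active_adapters = ["default_0"]

        def forward(self, x, *args, **kwargs):
            result = self.base_layer(x, *args, **kwargs)      # layer.py:557
            for active_adapter in self.active_adapters:
                if active_adapter not in self.lora_A.keys():  # layer.py:560
                    continue
                lora_A = self.lora_A[active_adapter]          # layer.py:562
                lora_B = self.lora_B[active_adapter]          # layer.py:563
                dropout = self.lora_dropout[active_adapter]   # layer.py:564
                scaling = self.scaling[active_adapter]        # layer.py:565
                x = x.to(lora_A.weight.dtype)                 # layer.py:566
                if not self.use_dora[active_adapter]:         # layer.py:568
                    result = result + lora_B(lora_A(dropout(x))) * scaling
            return result

Every attribute read on this path (lora_A, lora_B, lora_dropout, scaling, use_dora, plus merged_adapters and _active_adapter from tuners_utils.py) becomes a guard, which is why the tree repeats this block for each adapted layer.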
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].act_mlp, 140581770786224) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_mlp, 140533237340128) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[34].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_mlp.lora_A, 140533237336864) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_mlp.lora_A['default_0'], 140533237336144) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_mlp.lora_A['default_0'].weight, 140537322592976) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_mlp.lora_B, 140533237338496) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_mlp.lora_B['default_0'], 140533237335712) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_mlp.base_layer, 140581770786176) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_mlp.lora_dropout, 140533237339552) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_mlp.lora_dropout['default_0'], 140533237334512) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[34].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[34].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[34].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[34].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[34].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[34].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[34].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[34].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[34].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_out, 140533237346944) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[34].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_out.lora_A, 140533237343248) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_out.lora_A['default_0'], 140533242130400) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_out.lora_A['default_0'].weight, 140537322586336) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_out.lora_B, 140533237337536) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_out.lora_B['default_0'], 140533242128096) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_out.base_layer, 140581770786272) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_out.lora_dropout, 140533237349776) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_out.lora_dropout['default_0'], 140533237350112) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[34].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[34].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[34].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[34].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[34].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[34].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[34].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[34].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
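The guards for single_transformer_blocks[34] end here, and the tree continues with an identically shaped subtree for block 35; with Flux's 38 single and 19 double transformer blocks, this per-layer repetition accounts for the sheer size of the dump. Note also the EQUALS_MATCH guards pinning scaling['default_0'] to 1.0: running with a different lora_scale would fail them and force a recompile. A dump like this can be reproduced by enabling guard logging before compiling (a minimal sketch; the tiny model stands in for the LoRA-loaded FluxTransformer2DModel from this run):

    import torch
    import torch.nn as nn

    # Same artifact as running with the environment variable TORCH_LOGS="guards"
    torch._logging.set_logs(guards=True)

    # Placeholder model; in the run above this would be the Flux transformer
    # with the peft LoRA adapter attached.
    model = nn.Sequential(nn.Linear(8, 8), nn.SiLU())
    compiled = torch.compile(model)
    compiled(torch.randn(2, 8))  # the TREE_GUARD_MANAGER dump is emitted after compilation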
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[34].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35], accessed_by=GetItemGuardAccessor(35)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35], 140581770785936) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[35].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn, 140581770787136) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[35].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_k, 140537202936224) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[35].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_k.lora_A, 140537202937712) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_k.lora_A['default_0'], 140537202769728) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_k.lora_A['default_0'].weight, 140537323874928) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_k.lora_B, 140537202939824) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_k.lora_B['default_0'], 140537202769584) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_k.base_layer, 140581770787280) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
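Each leaf above is one of a small set of guard primitives. The snippet below gives rough Python equivalents for the ones that appear in this dump; these are illustrative simplifications, not Dynamo's actual implementations (the real checks run inside the C++ GuardManager tree).

    def id_match(obj, expected_id: int) -> bool:
        # ID_MATCH / ___check_obj_id: must be the very same Python object
        # (by id) as at compile time.
        return id(obj) == expected_id

    def type_match(obj, expected_type_id: int) -> bool:
        # TYPE_MATCH / ___check_type_id: same class; contents may differ.
        return id(type(obj)) == expected_type_id

    def dict_length(d: dict, n: int) -> bool:
        # DICT_LENGTH: the guarded dict still has exactly n entries.
        return len(d) == n

    def equals_match(value, constant) -> bool:
        # EQUALS_MATCH: e.g. scaling['default_0'] == 1.0.
        return value == constant

    def length_check_empty(container) -> bool:
        # LENGTH_CHECK of the form `not x`: e.g. merged_adapters must stay empty.
        return not container

    # TENSOR_ALIASING asserts that two sources resolve to one and the same
    # object, e.g. every layer's _active_adapter aliasing the one on
    # transformer_blocks[0].norm1.linear.

If any one of these checks fails on a later call (an adapter merged or added, a scaling changed, a module replaced), the cached graph is rejected and Dynamo recompiles.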
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_k.lora_dropout, 140537202936416) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_k.lora_dropout['default_0'], 140537202936512) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[35].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[35].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[35].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[35].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[35].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[35].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[35].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[35].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[35].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_q, 140537202937520) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: 
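The to_k subtree above, and the to_q / to_v subtrees that follow, all guard the same PEFT LoRA Linear forward that the trailing comments quote (peft/tuners/lora/layer.py:557-568): every attribute Dynamo reads while tracing that forward becomes a guard. A minimal, self-contained sketch of that control flow, paraphrased from the quoted lines (the class scaffolding, the adapter loop, and the final `result + lora_B(lora_A(dropout(x))) * scaling` update are assumptions filled in around the quoted statements; the real peft implementation differs in detail):

    import torch
    import torch.nn as nn

    class LoraLinearSketch(nn.Module):
        # Hedged paraphrase of the peft LoRA Linear forward quoted in the
        # guard comments above; not the verbatim peft source.
        def __init__(self, base: nn.Linear, r: int = 16, adapter: str = "default_0"):
            super().__init__()
            self.base_layer = base
            self.active_adapters = [adapter]       # shared list; hence the TENSOR_ALIASING guards
            self.lora_A = nn.ModuleDict({adapter: nn.Linear(base.in_features, r, bias=False)})
            self.lora_B = nn.ModuleDict({adapter: nn.Linear(r, base.out_features, bias=False)})
            self.lora_dropout = nn.ModuleDict({adapter: nn.Identity()})
            self.scaling = {adapter: 1.0}          # EQUALS_MATCH pins this exact value
            self.use_dora = {adapter: False}       # ID_MATCH pins the False singleton

        def forward(self, x, *args, **kwargs):
            result = self.base_layer(x, *args, **kwargs)        # line 557: base_layer is ID_MATCH-guarded
            for active_adapter in self.active_adapters:         # assumed loop
                if active_adapter not in self.lora_A.keys():    # line 560
                    continue
                lora_A = self.lora_A[active_adapter]            # line 562
                lora_B = self.lora_B[active_adapter]            # line 563
                dropout = self.lora_dropout[active_adapter]     # line 564
                scaling = self.scaling[active_adapter]          # line 565
                x = x.to(lora_A.weight.dtype)                   # line 566: weight identity is ID_MATCH-guarded
                if not self.use_dora[active_adapter]:           # line 568
                    result = result + lora_B(lora_A(dropout(x))) * scaling  # assumed update; not quoted in the log
            return result

Because scaling['default_0'] is value-guarded against 1.0, running the same pipeline with a different lora_scale would be expected to fail that EQUALS_MATCH and trigger another recompile of this frame.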
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_q, 140537202937520) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[35].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_q.lora_A, 140537202940448) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_q.lora_A['default_0'], 140537202934256) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_q.lora_A['default_0'].weight, 140537323871568) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_q.lora_B, 140537202942896) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_q.lora_B['default_0'], 140537202935216) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_q.base_layer, 140581770787376) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_q.lora_dropout, 140537202938864) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_q.lora_dropout['default_0'], 140537202939008) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[35].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[35].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[35].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[35].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[35].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[35].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[35].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[35].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
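The guard kinds recurring throughout this dump check progressively cheaper properties of the traced objects. Informal Python equivalents (the real checks live in Dynamo's C++ guard manager; this is only a sketch of their semantics):

    def id_match(obj, expected_id):         # ID_MATCH / ___check_obj_id: exact same Python object
        return id(obj) == expected_id

    def type_match(obj, expected_type_id):  # TYPE_MATCH / ___check_type_id: exact same type object
        return id(type(obj)) == expected_type_id

    def equals_match(value, expected):      # EQUALS_MATCH: compare by value (scaling == 1.0, eps == 1e-06, heads == 24)
        return value == expected

    def dict_length(d, n):                  # DICT_LENGTH: the adapter dicts must keep exactly n entries
        return len(d) == n

    def length_check(container):            # LENGTH_CHECK: merged_adapters must stay empty/falsy
        return not container

    def tensor_aliasing(a, b):              # TENSOR_ALIASING: two guard sources must be the very same object
        return a is b

The two ids that appear wherever a .training, use_dora, or _disable_adapters flag is guarded (140591004393440 and 140591004393408) are plausibly the interned bool singletons, so those ID_MATCH guards simply pin each module's train/eval state and feature flags; and because every LoRA layer's _active_adapter aliases the one on transformer_blocks[0].norm1.linear, a single shared list can satisfy all of the TENSOR_ALIASING guards at once.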
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_v, 140537202763968) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[35].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_v.lora_A, 140537202762576) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_v.lora_A['default_0'], 140537202763824) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_v.lora_A['default_0'].weight, 140537323868928) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_v.lora_B, 140537202762624) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_v.lora_B['default_0'], 140537202764160) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_v.base_layer, 140581770787424) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_v.lora_dropout, 140537202766800) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_v.lora_dropout['default_0'], 140537202766416) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[35].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[35].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[35].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[35].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[35].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[35].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[35].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[35].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.norm_k, 140581770787328) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[35].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[35].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.norm_k.weight, 140581771030704) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.norm_q, 140581770787184) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[35].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[35].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.norm_q.weight, 140581771031264) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[35].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
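norm_k and norm_q above are the attention's RMSNorm modules: the guards pin eps == 1e-06 and the identity of the optional weight parameter, matching the quoted lines diffusers/src/diffusers/models/normalization.py:428 and 430. A hedged, self-contained sketch of that forward (only lines 428 and 430 are quoted in the log; the variance computation and the final elementwise scale are assumptions filled in around them):

    import torch
    import torch.nn as nn

    class RMSNormSketch(nn.Module):
        # Hedged sketch of the diffusers RMSNorm forward behind the norm_q /
        # norm_k guards; not the verbatim diffusers source.
        def __init__(self, dim: int, eps: float = 1e-6, elementwise_affine: bool = True):
            super().__init__()
            self.eps = eps                                                        # EQUALS_MATCH: eps == 1e-06
            self.weight = nn.Parameter(torch.ones(dim)) if elementwise_affine else None

        def forward(self, hidden_states):
            variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)  # assumed
            hidden_states = hidden_states * torch.rsqrt(variance + self.eps)          # normalization.py:428
            if self.weight is not None:                                               # normalization.py:430
                hidden_states = hidden_states * self.weight                           # assumed elementwise scale
            return hidden_states

The attn.heads == 24 EQUALS_MATCH just above arises the same way: head_dim = inner_dim // attn.heads (attention_processor.py:1721) is Python integer arithmetic at trace time, so the integer is burned into the graph and must be value-guarded.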
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[35].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.processor, 140581770787088) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.forward, accessed_by=FuncDefaultsGuardAccessor
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm, 140581770786752) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[35].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.norm, 140581770786896) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.silu, 140581770786800) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.linear, 140533242127136) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[35].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.linear.lora_A, 140533242127568) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.linear.lora_A['default_0'], 140537202944192) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.linear.lora_A['default_0'].weight, 140537324561136) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.linear.lora_B, 140533242125936) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +-
ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.linear.lora_B['default_0'], 140537202940688) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.linear.base_layer, 140581770786848) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.linear.lora_dropout, 140533242122480) # dropout = 
self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.linear.lora_dropout['default_0'], 140533242121952) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[35].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[35].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[35].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[35].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[35].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[35].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[35].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.linear._disable_adapters, 140591004393440) # return 
self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[35].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[35].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].act_mlp, 140581770786992) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].act_mlp.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_mlp, 140537202946640) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[35].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_mlp.lora_A, 140537202939728) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[35].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_mlp.lora_A['default_0'], 140537202949904) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_mlp.lora_A['default_0'].weight, 140537324554256) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_mlp.lora_B, 140537202943904) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | 
| +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_mlp.lora_B['default_0'], 140537202946400) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_mlp.base_layer, 140581770786944) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_mlp.lora_dropout, 140537202942656) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_mlp.lora_dropout['default_0'], 140537202947264) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[35].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[35].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[35].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in 
forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[35].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[35].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[35].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[35].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[35].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[35].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_out, 140537202946832) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[35].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_out.lora_A, 140537202933824) # if active_adapter not 
in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_out.lora_A['default_0'], 140537202934592) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_out.lora_A['default_0'].weight, 140537323872848) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_out.lora_B, 140537202947168) # 
lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_out.lora_B['default_0'], 140537202935504) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_out.base_layer, 140581770787040) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_out.base_layer.training, 140591004393440) # result = 
self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_out.lora_dropout, 140537202946688) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_out.lora_dropout['default_0'], 140537202947216) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[35].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: 
len(L['self'].single_transformer_blocks[35].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[35].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[35].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[35].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[35].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[35].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[35].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[35].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[35].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36], accessed_by=GetItemGuardAccessor(36) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36], 140581770786704) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[36].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn, 140581770787904) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[36].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_k, 140537202066816) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ 
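
Every entry in this stretch of the dump cites one of a small set of PEFT source lines, so the guard tree is easiest to read against the code path it was traced from. Below is a paraphrased sketch of that path (peft/tuners/lora/layer.py, lora.Linear.forward, lines 557-568, plus the tuners_utils.py properties cited above), keyed to the guards each line produces. It is a simplification for orientation, not the verbatim PEFT implementation:

    def forward(self, x, *args, **kwargs):
        # :557 -> base_layer ID_MATCH guards
        result = self.base_layer(x, *args, **kwargs)
        # tuners_utils.py:506 merged, :511 disable_adapters, :516 active_adapter
        # -> merged_adapters LENGTH_CHECK, _disable_adapters ID_MATCH,
        #    _active_adapter TENSOR_ALIASING guards
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():  # :560 -> lora_A ID_MATCH
                continue
            lora_A = self.lora_A[active_adapter]          # :562 -> lora_A['default_0'] ID_MATCH
            lora_B = self.lora_B[active_adapter]          # :563 -> lora_B['default_0'] ID_MATCH
            dropout = self.lora_dropout[active_adapter]   # :564 -> lora_dropout ID_MATCH guards
            scaling = self.scaling[active_adapter]        # :565 -> TYPE_MATCH / DICT_LENGTH / EQUALS_MATCH == 1.0
            x = x.to(lora_A.weight.dtype)                 # :566 -> lora_A weight ID_MATCH
            if not self.use_dora[active_adapter]:         # :568 -> use_dora ID_MATCH (False)
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Every Python-level read in this path (the module dict lookups, the scaling float, the use_dora flag) becomes its own guard, and the block repeats for each LoRA-wrapped projection (to_q, to_k, to_v, proj_out, ...) in every transformer block, which is what makes the dump this long. The [0/1] tag marks these entries as the second compilation of frame 0, i.e. a recompile: any guarded value that changes between calls, for example an adapter scaling no longer equal to the 1.0 pinned by the EQUALS_MATCH guards, fails its guard and triggers exactly such a recompilation. The dump itself comes from the "guards" logging artifact (torch._logging.set_logs(guards=True), or TORCH_LOGS="guards" in the environment).
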
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[36].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_k.lora_A, 140537202067008) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_k.lora_A['default_0'], 140537202068976) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[36].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_k.lora_A['default_0'].weight, 140537323713168) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_k.lora_B, 140537202069168) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_k.lora_B['default_0'], 140537202071280) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_k.base_layer, 140581770788048) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_k.lora_dropout, 140537202067920) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_k.lora_dropout.training, 140591004393408) # 
dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_k.lora_dropout['default_0'], 140537202066480) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[36].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[36].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[36].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[36].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] 
[0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[36].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[36].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[36].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[36].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[36].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_q, 140537202076848) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[36].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_q.lora_A, 140537202075120) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[36].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_q.lora_A['default_0'], 140537202076080) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_q.lora_A['default_0'].weight, 140537323713328) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_q.lora_B, 140537202075504) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_q.lora_B['default_0'], 140537202074160) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_q.base_layer, 140581770788144) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[36].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_q.lora_dropout, 140537202068736) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_q.lora_dropout['default_0'], 140537202077232) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[36].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[36].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # 
peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[36].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[36].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[36].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[36].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[36].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q._disable_adapters, 
accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[36].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[36].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_v, 140537202072048) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[36].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 
14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_v.lora_A, 140537202072672) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_v.lora_A['default_0'], 140537202078768) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.lora_A['default_0'].weight, 
accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_v.lora_A['default_0'].weight, 140537323711008) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_v.lora_B, 140537202072096) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_v.lora_B['default_0'], 140537202080400) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_v.base_layer, 140581770788192) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_v.lora_dropout, 140537202070848) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_v.lora_dropout['default_0'], 140537202073056) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.lora_dropout['default_0'].training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[36].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[36].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[36].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[36].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[36].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[36].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[36].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[36].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[36].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[36].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.norm_k, 140581770788096) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in 
__call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[36].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[36].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.norm_k.weight, 140581766111904) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_k._backward_pre_hooks, 
accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.norm_q, 140581770787952) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[36].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[36].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.norm_q.weight, 140581773261648) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[36].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[36].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[36].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.processor, 140581770787856) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm, 140581770787520) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[36].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.norm, 140581770787664) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.norm.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.silu, 140581770787568) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.linear, 140537202758256) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[36].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[36].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.linear.lora_A, 140537202756672) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.linear.lora_A['default_0'], 140537202760368) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] 
[0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.linear.lora_A['default_0'].weight, 140537323865968) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.linear.lora_B, 140537202757104) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.linear.lora_B['default_0'], 140537202758688) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.linear.base_layer, 140581770787616) # result = 
self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.linear.lora_dropout, 140537202756528) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.linear.lora_dropout['default_0'], 140537202756816) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[36].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[36].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[36].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[36].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[36].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear._backward_hooks, 
accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[36].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[36].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[36].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[36].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | 
| | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].act_mlp, 140581770787760) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_mlp, 140537202762096) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[36].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_mlp.lora_A, 140537202754032) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_mlp.lora_A['default_0'], 140537202753984) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] 
[__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_mlp.lora_A['default_0'].weight, 140537323711728) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_mlp.lora_B, 140537202762288) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_mlp.lora_B['default_0'], 140537202768288) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_mlp.base_layer, 140581770787712) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_mlp.lora_dropout, 140537202760800) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_mlp.lora_dropout['default_0'], 140537202758016) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # 
peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[36].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[36].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[36].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[36].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[36].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[36].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[36].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[36].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[36].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_out, 140537202766992) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[36].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_out.lora_A, 140537202756768) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_out.lora_A['default_0'], 140537202080304) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_out.lora_A['default_0'].weight, 140537323707888) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_out.lora_B, 140537202072720) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_out.lora_B['default_0'], 140537202080064) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_out.base_layer, 140581770787808) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_out.lora_dropout, 140537202762240) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_out.lora_dropout['default_0'], 140537202758976) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[36].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[36].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[36].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[36].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[36].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[36].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[36].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[36].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[36].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37], accessed_by=GetItemGuardAccessor(37)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37], 140581770787472) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[37].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn, 140581770788672) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[37].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_k, 140537203617952) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[37].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_k.lora_A, 140537203619536) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_k.lora_A['default_0'], 140537203612864) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_k.lora_A['default_0'].weight, 140537325517488) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_k.lora_B, 140537203617904) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_k.lora_B['default_0'], 140537203610464) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_k.base_layer, 140581765087296) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_k.lora_dropout, 140537203616752) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_k.lora_dropout['default_0'], 140537203617760) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[37].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[37].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[37].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[37].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[37].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[37].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[37].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
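The cluster above repeats, for each LoRA-wrapped linear, the attribute reads that PEFT's unmerged forward performs (the peft/tuners/lora/layer.py:557-568 and peft/tuners/tuners_utils.py:506-516 frames cited in every guard). For orientation, here is a minimal illustrative sketch of that forward pattern. MiniLoraLinear is a stand-in written for this note, not PEFT's actual class; it assumes a single adapter named "default_0" with lora_alpha equal to r, which is why the EQUALS_MATCH guards above pin scaling['default_0'] to 1.0.

```python
import torch
import torch.nn as nn


class MiniLoraLinear(nn.Module):
    """Illustrative stand-in for a PEFT LoRA linear (hypothetical class, not peft's).

    It mirrors the attribute reads guarded above: merged_adapters (LENGTH_CHECK),
    _disable_adapters (ID_MATCH on the bool), lora_A/lora_B/lora_dropout keyed by
    adapter name (ID_MATCH per module), and the per-adapter scaling / use_dora
    dicts (TYPE_MATCH + DICT_LENGTH + EQUALS_MATCH on the float).
    """

    def __init__(self, base: nn.Linear, r: int = 16, lora_alpha: int = 16,
                 adapter: str = "default_0"):
        super().__init__()
        self.base_layer = base
        self.lora_A = nn.ModuleDict({adapter: nn.Linear(base.in_features, r, bias=False)})
        self.lora_B = nn.ModuleDict({adapter: nn.Linear(r, base.out_features, bias=False)})
        self.lora_dropout = nn.ModuleDict({adapter: nn.Identity()})
        self.scaling = {adapter: lora_alpha / r}   # 1.0 here, matching the EQUALS_MATCH guard
        self.use_dora = {adapter: False}
        self.merged_adapters = []                  # empty -> the LENGTH_CHECK guard passes
        self._disable_adapters = False
        self._active_adapter = [adapter]           # shared across layers in peft, hence TENSOR_ALIASING

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        result = self.base_layer(x)                          # layer.py:557
        if self._disable_adapters or self.merged_adapters:
            return result
        for active_adapter in self._active_adapter:
            if active_adapter not in self.lora_A.keys():     # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]             # layer.py:562
            lora_B = self.lora_B[active_adapter]             # layer.py:563
            dropout = self.lora_dropout[active_adapter]      # layer.py:564
            scaling = self.scaling[active_adapter]           # layer.py:565
            x = x.to(lora_A.weight.dtype)                    # layer.py:566
            if not self.use_dora[active_adapter]:            # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result


layer = MiniLoraLinear(nn.Linear(3072, 3072))
print(layer(torch.randn(2, 3072)).shape)  # torch.Size([2, 3072])
```

Because every one of these reads is guarded (ID_MATCH on each submodule and weight object, EQUALS_MATCH on the scaling float, LENGTH_CHECK on merged_adapters), any adapter-state change, such as loading another LoRA, merging, toggling disable_adapters, or changing the effective lora_scale, falls outside the guard set and forces a recompile. The dump continues below with the matching guards for block 37's to_q and to_v projections.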
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[37].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[37].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_q, 140537203452048) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[37].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_q.lora_A, 140537203616320) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_q.lora_A['default_0'], 140537203606000) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 
140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_q.lora_A['default_0'].weight, 140537325507808) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_q.lora_B, 140537203608304) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_q.lora_B['default_0'], 140537203620592) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.base_layer, 
accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_q.base_layer, 140581765087344) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_q.lora_dropout, 140537203449744) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_q.lora_dropout['default_0'], 140537203450704) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | 
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[37].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[37].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[37].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[37].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[37].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q._forward_hooks, 
accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[37].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[37].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[37].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[37].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: 
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_v, 140537203614208) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[37].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_v.lora_A, 140537203614448) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_v.lora_A['default_0'], 140537203605904) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_v.lora_A['default_0'].weight, 140537325502848) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_v.lora_B, 140537203610896) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_v.lora_B['default_0'], 140537203605568) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_v.base_layer, 140581765087392) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_v.lora_dropout, 140537203609552) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_v.lora_dropout['default_0'], 140537203614352) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[37].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[37].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[37].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[37].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[37].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[37].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[37].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[37].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
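
Each guard in the to_q/to_v subtrees pins one line of the PEFT LoRA forward that Dynamo inlined. The sketch below is assembled from the source lines quoted in the guard comments (peft/tuners/lora/layer.py and peft/tuners/tuners_utils.py); the loop header and the final update line are not quoted anywhere in this log and are filled in from peft's published implementation, so treat them as assumptions about the installed version.

    # Condensed sketch of the guarded LoRA linear layer (not peft's exact code).
    class LoraLinearSketch:
        @property
        def merged(self):                        # tuners_utils.py:506
            return bool(self.merged_adapters)    # LENGTH_CHECK: list must stay empty

        @property
        def disable_adapters(self):              # tuners_utils.py:511
            return self._disable_adapters        # ID_MATCH: pinned to one bool singleton

        @property
        def active_adapter(self):                # tuners_utils.py:516
            return self._active_adapter          # TENSOR_ALIASING: one shared list

        def forward(self, x, *args, **kwargs):
            result = self.base_layer(x, *args, **kwargs)         # layer.py:557
            for active_adapter in self.active_adapters:          # assumed loop header
                if active_adapter not in self.lora_A.keys():     # layer.py:560
                    continue
                lora_A = self.lora_A[active_adapter]             # layer.py:562
                lora_B = self.lora_B[active_adapter]             # layer.py:563
                dropout = self.lora_dropout[active_adapter]      # layer.py:564
                scaling = self.scaling[active_adapter]           # layer.py:565, guarded == 1.0
                x = x.to(lora_A.weight.dtype)                    # layer.py:566
                if not self.use_dora[active_adapter]:            # layer.py:568
                    result = result + lora_B(lora_A(dropout(x))) * scaling  # assumed
            return result

This is also why the guard set is so large: dict identities, the scaling value, the use_dora flag and the merge state are all read on the hot path, so a set_adapter(), merge(), or changed lora_scale flips one of these checks and forces a recompile.
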
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.norm_k, 140581770788816) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[37].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[37].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.norm_k.weight, 140581766113424) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.norm_q, 140581770788720) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[37].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[37].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.norm_q.weight, 140581766113904) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[37].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[37].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.processor, 140581770788624) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.forward, accessed_by=FuncDefaultsGuardAccessor
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
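
The norm_q/norm_k/heads/processor guards all come from the Flux attention processor. Reconstructed from the quoted lines, the traced fragment looks roughly like the sketch below; everything not quoted in the log (the to_q/to_k projections, the variance computation, the weight multiply) is an assumption added for context. The ID_MATCH on forward.__defaults__[0] pins the default value of encoder_hidden_states by identity, presumably the None singleton.

    # Fragment implied by the quoted attention_processor.py lines (a sketch).
    import torch

    def attn_call_sketch(attn, hidden_states, encoder_hidden_states=None):
        batch_size, _, _ = (
            hidden_states.shape if encoder_hidden_states is None
            else encoder_hidden_states.shape          # line 1713
        )
        query = attn.to_q(hidden_states)              # assumed, mirrors to_v below
        key = attn.to_k(hidden_states)                # assumed
        value = attn.to_v(hidden_states)              # line 1718
        inner_dim = key.shape[-1]                     # assumed
        head_dim = inner_dim // attn.heads            # line 1721, heads guarded == 24
        if attn.norm_q is not None:                   # line 1727
            query = attn.norm_q(query)
        if attn.norm_k is not None:                   # line 1729
            key = attn.norm_k(key)
        return query, key, value, head_dim

    def rms_norm_sketch(self, hidden_states):
        # RMSNorm forward behind the eps/weight guards (normalization.py:428-430).
        variance = hidden_states.pow(2).mean(-1, keepdim=True)            # assumed
        hidden_states = hidden_states * torch.rsqrt(variance + self.eps)  # line 428, eps == 1e-06
        if self.weight is not None:                                       # line 430
            hidden_states = hidden_states * self.weight                   # assumed
        return hidden_states
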
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm, 140581770788288) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[37].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.norm, 140581770788432) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.silu, 140581770788336) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.linear, 140537203457088) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[37].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.linear.lora_A, 140537203454208) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.linear.lora_A['default_0'], 140537203449456) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.linear.lora_A['default_0'].weight, 140537323702928) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.linear.lora_B, 140537203456992) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.linear.lora_B['default_0'], 140537203448784) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.linear.base_layer, 140581770788384) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.linear.lora_dropout, 140537203455408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.linear.lora_dropout['default_0'], 140537203456608) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[37].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[37].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[37].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[37].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[37].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[37].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[37].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[37].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
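
The norm subtree above is an AdaLayerNormZeroSingle whose projection is itself a LoRA-injected linear, and the act_mlp/proj_mlp guards that follow come from the first lines of the single transformer block's forward. A sketch of the guarded path, reconstructed from the quoted lines (normalization.py:169-171, transformer_flux.py:88-89); the chunk() split is an assumption about how shift/scale/gate are produced:

    # AdaLayerNormZeroSingle.forward, per the quoted normalization.py lines.
    def ada_layer_norm_zero_single_sketch(self, x, emb):
        emb = self.linear(self.silu(emb))                                  # line 169
        shift_msa, scale_msa, gate_msa = emb.chunk(3, dim=1)               # assumed
        x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]   # line 171
        return x, gate_msa

    # Start of FluxSingleTransformerBlock.forward (transformer_flux.py:88-89),
    # which produces the act_mlp / proj_mlp guards below:
    #   norm_hidden_states, gate = self.norm(hidden_states, emb=temb)          # line 88
    #   mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states))    # line 89
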
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].act_mlp, 140581770788528) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_mlp, 140537203442016) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[37].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_mlp.lora_A, 140537203450656) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_mlp.lora_A['default_0'], 140537203443360) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_mlp.lora_A['default_0'].weight, 140537325512368) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_mlp.lora_B, 140537203452144) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_mlp.lora_B['default_0'], 140537203443744) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_mlp.base_layer, 140581770788480) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_mlp.lora_dropout, 140537203447392) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564
in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_mlp.lora_dropout['default_0'], 140537203450752) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[37].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[37].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[37].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[37].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[37].proj_mlp.use_dora) == 1 # if not 
self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[37].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[37].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[37].proj_mlp._active_adapter # return 
self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[37].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_out, 140537203445328) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[37].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_out.lora_A, 140537203445712) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[37].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_out.lora_A['default_0'], 140537203446864) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_out.lora_A['default_0'].weight, 140537325505328) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_out.lora_B, 140537203445760) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | 
| +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_out.lora_B['default_0'], 140537203446672) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_out.base_layer, 140581770788576) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_out.lora_dropout, 140537203444848) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_out.lora_dropout['default_0'], 140537203443840) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[37].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[37].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[37].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in 
forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[37].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[37].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[37].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[37].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 
torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[37].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[37].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | +- GuardManager: source=L['img_ids'], accessed_by=DictGetItemGuardAccessor(img_ids) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | +- TENSOR_MATCH: check_tensor(L['img_ids'], Tensor, DispatchKeySet(CUDA, BackendSelect, ADInplaceOrView, AutogradCUDA), torch.bfloat16, device=0, requires_grad=False, size=[4096, 3], stride=[3, 1]) # if img_ids.ndim == 3: # diffusers/src/diffusers/models/transformers/transformer_flux.py:462 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | +- NO_HASATTR: hasattr(L['img_ids'], '_dynamo_dynamic_indices') == False # if img_ids.ndim == 3: # diffusers/src/diffusers/models/transformers/transformer_flux.py:462 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | +- NO_TENSOR_ALIASING: check_no_aliasing(L['img_ids'], L['txt_ids'], L['guidance'], L['timestep'], L['hidden_states'], L['pooled_projections'], L['encoder_hidden_states']) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | +- GuardManager: source=L['txt_ids'], accessed_by=DictGetItemGuardAccessor(txt_ids) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | +- TENSOR_MATCH: check_tensor(L['txt_ids'], Tensor, DispatchKeySet(CUDA, BackendSelect, ADInplaceOrView, 
AutogradCUDA), torch.bfloat16, device=0, requires_grad=False, size=[512, 3], stride=[3, 1]) # if txt_ids.ndim == 3: # diffusers/src/diffusers/models/transformers/transformer_flux.py:456 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | +- NO_HASATTR: hasattr(L['txt_ids'], '_dynamo_dynamic_indices') == False # if txt_ids.ndim == 3: # diffusers/src/diffusers/models/transformers/transformer_flux.py:456 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | +- NO_TENSOR_ALIASING: check_no_aliasing(L['img_ids'], L['txt_ids'], L['guidance'], L['timestep'], L['hidden_states'], L['pooled_projections'], L['encoder_hidden_states']) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | +- GuardManager: source=L['guidance'], accessed_by=DictGetItemGuardAccessor(guidance) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | +- TENSOR_MATCH: check_tensor(L['guidance'], Tensor, DispatchKeySet(CUDA, BackendSelect, ADInplaceOrView, AutogradCUDA), torch.float32, device=0, requires_grad=False, size=[1], stride=[1]) # if guidance is not None: # diffusers/src/diffusers/models/transformers/transformer_flux.py:445 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | +- NO_HASATTR: hasattr(L['guidance'], '_dynamo_dynamic_indices') == False # if guidance is not None: # diffusers/src/diffusers/models/transformers/transformer_flux.py:445 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | +- NO_TENSOR_ALIASING: check_no_aliasing(L['img_ids'], L['txt_ids'], L['guidance'], L['timestep'], L['hidden_states'], L['pooled_projections'], L['encoder_hidden_states']) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | +- GuardManager: source=L['timestep'], accessed_by=DictGetItemGuardAccessor(timestep) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | +- TENSOR_MATCH: check_tensor(L['timestep'], Tensor, DispatchKeySet(CUDA, BackendSelect, ADInplaceOrView, AutogradCUDA), torch.bfloat16, device=0, requires_grad=False, size=[1], stride=[1]) # timestep = timestep.to(hidden_states.dtype) * 1000 # diffusers/src/diffusers/models/transformers/transformer_flux.py:444 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | +- NO_HASATTR: hasattr(L['timestep'], '_dynamo_dynamic_indices') == False # timestep = timestep.to(hidden_states.dtype) * 1000 # diffusers/src/diffusers/models/transformers/transformer_flux.py:444 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | +- NO_TENSOR_ALIASING: check_no_aliasing(L['img_ids'], L['txt_ids'], L['guidance'], L['timestep'], L['hidden_states'], L['pooled_projections'], L['encoder_hidden_states']) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | +- GuardManager: source=L['return_dict'], accessed_by=DictGetItemGuardAccessor(return_dict) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | +- ID_MATCH: ___check_obj_id(L['return_dict'], 140591004393440) # if not return_dict: # diffusers/src/diffusers/models/transformers/transformer_flux.py:555 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | +- GuardManager: source=L['hidden_states'], 
accessed_by=DictGetItemGuardAccessor(hidden_states) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | +- TENSOR_MATCH: check_tensor(L['hidden_states'], Tensor, DispatchKeySet(CUDA, BackendSelect, ADInplaceOrView, AutogradCUDA), torch.bfloat16, device=0, requires_grad=False, size=[1, 4096, 64], stride=[262144, 64, 1]) # hidden_states = self.x_embedder(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:442 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | +- NO_HASATTR: hasattr(L['hidden_states'], '_dynamo_dynamic_indices') == False # hidden_states = self.x_embedder(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:442 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | +- NO_TENSOR_ALIASING: check_no_aliasing(L['img_ids'], L['txt_ids'], L['guidance'], L['timestep'], L['hidden_states'], L['pooled_projections'], L['encoder_hidden_states']) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | +- GuardManager: source=L['pooled_projections'], accessed_by=DictGetItemGuardAccessor(pooled_projections) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | +- TENSOR_MATCH: check_tensor(L['pooled_projections'], Tensor, DispatchKeySet(CUDA, BackendSelect, ADInplaceOrView, AutogradCUDA), torch.bfloat16, device=0, requires_grad=False, size=[1, 768], stride=[768, 1]) # timesteps_emb = self.timestep_embedder(timesteps_proj.to(dtype=pooled_projection.dtype)) # (N, D) # diffusers/src/diffusers/models/embeddings.py:1060 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | +- NO_HASATTR: hasattr(L['pooled_projections'], '_dynamo_dynamic_indices') == False # timesteps_emb = self.timestep_embedder(timesteps_proj.to(dtype=pooled_projection.dtype)) # (N, D) # diffusers/src/diffusers/models/embeddings.py:1060 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | +- NO_TENSOR_ALIASING: check_no_aliasing(L['img_ids'], L['txt_ids'], L['guidance'], L['timestep'], L['hidden_states'], L['pooled_projections'], L['encoder_hidden_states']) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | +- GuardManager: source=L['encoder_hidden_states'], accessed_by=DictGetItemGuardAccessor(encoder_hidden_states) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | +- TENSOR_MATCH: check_tensor(L['encoder_hidden_states'], Tensor, DispatchKeySet(CUDA, BackendSelect, ADInplaceOrView, AutogradCUDA), torch.bfloat16, device=0, requires_grad=False, size=[1, 512, 4096], stride=[2097152, 4096, 1]) # encoder_hidden_states = self.context_embedder(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:454 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | +- NO_HASATTR: hasattr(L['encoder_hidden_states'], '_dynamo_dynamic_indices') == False # encoder_hidden_states = self.context_embedder(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:454 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | +- NO_TENSOR_ALIASING: check_no_aliasing(L['img_ids'], L['txt_ids'], L['guidance'], L['timestep'], L['hidden_states'], L['pooled_projections'], 
L['encoder_hidden_states']) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | +- GuardManager: source=L['joint_attention_kwargs'], accessed_by=DictGetItemGuardAccessor(joint_attention_kwargs) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | +- ID_MATCH: ___check_obj_id(L['joint_attention_kwargs'], 140591004478624) # if joint_attention_kwargs is not None: # diffusers/src/diffusers/models/transformers/transformer_flux.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | +- GuardManager: source=L['controlnet_block_samples'], accessed_by=DictGetItemGuardAccessor(controlnet_block_samples) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | +- ID_MATCH: ___check_obj_id(L['controlnet_block_samples'], 140591004478624) # if controlnet_block_samples is not None: # diffusers/src/diffusers/models/transformers/transformer_flux.py:502 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | +- GuardManager: source=L['controlnet_single_block_samples'], accessed_by=DictGetItemGuardAccessor(controlnet_single_block_samples) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | +- ID_MATCH: ___check_obj_id(L['controlnet_single_block_samples'], 140591004478624) # if controlnet_single_block_samples is not None: # diffusers/src/diffusers/models/transformers/transformer_flux.py:538 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | +- GuardManager: source=G, accessed_by=GlobalsGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | +- GuardManager: source=G['torch'], accessed_by=DictGetItemGuardAccessor(torch) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | +- ID_MATCH: ___check_obj_id(G['torch'], 140590979095808) # ids = torch.cat((txt_ids, img_ids), dim=0) # diffusers/src/diffusers/models/transformers/transformer_flux.py:468 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | +- GuardManager: source=G['torch'].cat, accessed_by=GetAttrGuardAccessor(cat) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['torch'].cat, 140590976095136) # ids = torch.cat((txt_ids, img_ids), dim=0) # diffusers/src/diffusers/models/transformers/transformer_flux.py:468 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | +- GuardManager: source=G['torch'].float16, accessed_by=GetAttrGuardAccessor(float16) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- EQUALS_MATCH: G['torch'].float16 == torch.float16 # if encoder_hidden_states.dtype == torch.float16: # diffusers/src/diffusers/models/transformers/transformer_flux.py:200 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | +- GuardManager: source=G['USE_PEFT_BACKEND'], accessed_by=DictGetItemGuardAccessor(USE_PEFT_BACKEND) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | +- ID_MATCH: ___check_obj_id(G['USE_PEFT_BACKEND'], 140591004393408) # if USE_PEFT_BACKEND: # diffusers/src/diffusers/models/transformers/transformer_flux.py:434 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] 
[0/1] [__guards] | | +- GuardManager: source=G['scale_lora_layers'], accessed_by=DictGetItemGuardAccessor(scale_lora_layers) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | +- GuardManager: source=G['scale_lora_layers'].__code__, accessed_by=GetAttrGuardAccessor(__code__) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['scale_lora_layers'].__code__, 140585209572752) # scale_lora_layers(self, lora_scale) # diffusers/src/diffusers/models/transformers/transformer_flux.py:436 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | +- GuardManager: source=G['unscale_lora_layers'], accessed_by=DictGetItemGuardAccessor(unscale_lora_layers) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | +- GuardManager: source=G['unscale_lora_layers'].__code__, accessed_by=GetAttrGuardAccessor(__code__) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['unscale_lora_layers'].__code__, 140585209572928) # unscale_lora_layers(self, lora_scale) # diffusers/src/diffusers/models/transformers/transformer_flux.py:553 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | +- GuardManager: source=G['__builtins_dict___2'], accessed_by=DictGetItemGuardAccessor(__builtins_dict___2) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | +- GuardManager: source=G['__builtins_dict___2']['int'], accessed_by=DictGetItemGuardAccessor(int) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__builtins_dict___2']['int'], 140591004461248) # if isinstance(pos, int): # diffusers/src/diffusers/models/embeddings.py:605 in get_1d_rotary_pos_embed V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | +- GuardManager: source=G['__builtins_dict___2']['len'], accessed_by=DictGetItemGuardAccessor(len) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__builtins_dict___2']['len'], 140590981894672) # assert len(timesteps.shape) == 1, "Timesteps should be a 1d-array" # diffusers/src/diffusers/models/embeddings.py:54 in get_timestep_embedding V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | +- GuardManager: source=G['__builtins_dict___2']['set'], accessed_by=DictGetItemGuardAccessor(set) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__builtins_dict___2']['set'], 140591004484896) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | +- GuardManager: source=G['__builtins_dict___2']['str'], accessed_by=DictGetItemGuardAccessor(str) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__builtins_dict___2']['str'], 140591004503168) # if isinstance(self.active_adapter, str): # peft/tuners/tuners_utils.py:530 in active_adapters V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | +- GuardManager: 
source=G['__builtins_dict___2']['bool'], accessed_by=DictGetItemGuardAccessor(bool) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__builtins_dict___2']['bool'], 140591004393472) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | +- GuardManager: source=G['__builtins_dict___2']['range'], accessed_by=DictGetItemGuardAccessor(range) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__builtins_dict___2']['range'], 140591004481376) # for i in range(n_axes): # diffusers/src/diffusers/models/embeddings.py:696 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | +- GuardManager: source=G['__builtins_dict___2']['enumerate'], accessed_by=DictGetItemGuardAccessor(enumerate) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__builtins_dict___2']['enumerate'], 140591004413056) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | +- GuardManager: source=G['__builtins_dict___2']['isinstance'], accessed_by=DictGetItemGuardAccessor(isinstance) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__builtins_dict___2']['isinstance'], 140590981894352) # if isinstance(pos, int): # diffusers/src/diffusers/models/embeddings.py:605 in get_1d_rotary_pos_embed V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | +- GuardManager: source=G['__import_peft_dot_tuners_dot_tuners_utils'], accessed_by=DictGetItemGuardAccessor(__import_peft_dot_tuners_dot_tuners_utils) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | +- ID_MATCH: ___check_obj_id(G['__import_peft_dot_tuners_dot_tuners_utils'], 140585265503648) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | +- GuardManager: source=G['__import_peft_dot_tuners_dot_tuners_utils'].BaseTunerLayer, accessed_by=GetAttrGuardAccessor(BaseTunerLayer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__import_peft_dot_tuners_dot_tuners_utils'].BaseTunerLayer, 93831560473968) # from peft.tuners.tuners_utils import BaseTunerLayer # diffusers/src/diffusers/utils/peft_utils.py:113 in scale_lora_layers V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_attention'], accessed_by=DictGetItemGuardAccessor(__import_diffusers_dot_models_dot_attention) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_attention'], 140585077988688) # if len(args) > 0 or kwargs.get("scale", None) is not None: # diffusers/src/diffusers/models/attention.py:1197 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | +- GuardManager: 
source=G['__import_diffusers_dot_models_dot_embeddings'], accessed_by=DictGetItemGuardAccessor(__import_diffusers_dot_models_dot_embeddings) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'], 140585079518960) # t_emb = get_timestep_embedding( # diffusers/src/diffusers/models/embeddings.py:764 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].np, accessed_by=GetAttrGuardAccessor(np) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].np, 140590976137424) # if isinstance(pos, np.ndarray): # diffusers/src/diffusers/models/embeddings.py:607 in get_1d_rotary_pos_embed V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].np.ndarray, accessed_by=GetAttrGuardAccessor(ndarray) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].np.ndarray, 140588028923008) # if isinstance(pos, np.ndarray): # diffusers/src/diffusers/models/embeddings.py:607 in get_1d_rotary_pos_embed V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].math, accessed_by=GetAttrGuardAccessor(math) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].math, 140590979232480) # exponent = -math.log(max_period) * torch.arange( # diffusers/src/diffusers/models/embeddings.py:57 in get_timestep_embedding V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].math.log, accessed_by=GetAttrGuardAccessor(log) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].math.log, 140590979235360) # exponent = -math.log(max_period) * torch.arange( # diffusers/src/diffusers/models/embeddings.py:57 in get_timestep_embedding V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].torch, accessed_by=GetAttrGuardAccessor(torch) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].torch, 140590979095808) # exponent = -math.log(max_period) * torch.arange( # diffusers/src/diffusers/models/embeddings.py:57 in get_timestep_embedding V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].torch.cat, accessed_by=GetAttrGuardAccessor(cat) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].torch.cat, 140590976095136) # emb = torch.cat([torch.sin(emb), torch.cos(emb)], 
dim=-1) # diffusers/src/diffusers/models/embeddings.py:69 in get_timestep_embedding V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].torch.cos, accessed_by=GetAttrGuardAccessor(cos) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].torch.cos, 140590976096336) # emb = torch.cat([torch.sin(emb), torch.cos(emb)], dim=-1) # diffusers/src/diffusers/models/embeddings.py:69 in get_timestep_embedding V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].torch.exp, accessed_by=GetAttrGuardAccessor(exp) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].torch.exp, 140590976097696) # emb = torch.exp(exponent) # diffusers/src/diffusers/models/embeddings.py:62 in get_timestep_embedding V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].torch.sin, accessed_by=GetAttrGuardAccessor(sin) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].torch.sin, 140590976106096) # emb = torch.cat([torch.sin(emb), torch.cos(emb)], dim=-1) # diffusers/src/diffusers/models/embeddings.py:69 in get_timestep_embedding V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].torch.outer, accessed_by=GetAttrGuardAccessor(outer) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].torch.outer, 140590976134544) # freqs = torch.outer(pos, freqs) # type: ignore # [S, D/2] # diffusers/src/diffusers/models/embeddings.py:616 in get_1d_rotary_pos_embed V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].torch.stack, accessed_by=GetAttrGuardAccessor(stack) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].torch.stack, 140590976059488) # x_rotated = torch.stack([-x_imag, x_real], dim=-1).flatten(3) # diffusers/src/diffusers/models/embeddings.py:662 in apply_rotary_emb V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].torch.arange, accessed_by=GetAttrGuardAccessor(arange) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].torch.arange, 140590975983808) # exponent = -math.log(max_period) * torch.arange( # diffusers/src/diffusers/models/embeddings.py:57 in get_timestep_embedding V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- GuardManager: 
source=G['__import_diffusers_dot_models_dot_embeddings'].torch.float32, accessed_by=GetAttrGuardAccessor(float32) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- EQUALS_MATCH: G['__import_diffusers_dot_models_dot_embeddings'].torch.float32 == torch.float32 # start=0, end=half_dim, dtype=torch.float32, device=timesteps.device # diffusers/src/diffusers/models/embeddings.py:58 in get_timestep_embedding V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].torch.float64, accessed_by=GetAttrGuardAccessor(float64) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- EQUALS_MATCH: G['__import_diffusers_dot_models_dot_embeddings'].torch.float64 == torch.float64 # freqs_dtype = torch.float32 if is_mps else torch.float64 # diffusers/src/diffusers/models/embeddings.py:695 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].apply_rotary_emb, accessed_by=GetAttrGuardAccessor(apply_rotary_emb) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].apply_rotary_emb.__code__, accessed_by=GetAttrGuardAccessor(__code__) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].apply_rotary_emb.__code__, 140585079325072) # from .embeddings import apply_rotary_emb # diffusers/src/diffusers/models/attention_processor.py:1760 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].apply_rotary_emb, accessed_by=FuncDefaultsGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].apply_rotary_emb.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].apply_rotary_emb.__defaults__[0], 140591004393408) # if use_real: # diffusers/src/diffusers/models/embeddings.py:653 in apply_rotary_emb V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].apply_rotary_emb.__defaults__[1], accessed_by=GetItemGuardAccessor(1) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- EQUALS_MATCH: G['__import_diffusers_dot_models_dot_embeddings'].apply_rotary_emb.__defaults__[1] == -1 # if use_real_unbind_dim == -1: # diffusers/src/diffusers/models/embeddings.py:659 in apply_rotary_emb V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].get_timestep_embedding, accessed_by=GetAttrGuardAccessor(get_timestep_embedding) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- GuardManager: 
source=G['__import_diffusers_dot_models_dot_embeddings'].get_timestep_embedding.__code__, accessed_by=GetAttrGuardAccessor(__code__) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].get_timestep_embedding.__code__, 140585079245968) # t_emb = get_timestep_embedding( # diffusers/src/diffusers/models/embeddings.py:764 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].get_timestep_embedding, accessed_by=FuncDefaultsGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].get_timestep_embedding.__defaults__[3], accessed_by=GetItemGuardAccessor(3) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- EQUALS_MATCH: G['__import_diffusers_dot_models_dot_embeddings'].get_timestep_embedding.__defaults__[3] == 10000 # exponent = -math.log(max_period) * torch.arange( # diffusers/src/diffusers/models/embeddings.py:57 in get_timestep_embedding V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].get_1d_rotary_pos_embed, accessed_by=GetAttrGuardAccessor(get_1d_rotary_pos_embed) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].get_1d_rotary_pos_embed.__code__, accessed_by=GetAttrGuardAccessor(__code__) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].get_1d_rotary_pos_embed.__code__, 140585079258816) # cos, sin = get_1d_rotary_pos_embed( # diffusers/src/diffusers/models/embeddings.py:697 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].get_1d_rotary_pos_embed, accessed_by=FuncDefaultsGuardAccessor V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].get_1d_rotary_pos_embed.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- EQUALS_MATCH: G['__import_diffusers_dot_models_dot_embeddings'].get_1d_rotary_pos_embed.__defaults__[0] == 10000.0 # theta = theta * ntk_factor # diffusers/src/diffusers/models/embeddings.py:610 in get_1d_rotary_pos_embed V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].get_1d_rotary_pos_embed.__defaults__[2], accessed_by=GetItemGuardAccessor(2) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- EQUALS_MATCH: G['__import_diffusers_dot_models_dot_embeddings'].get_1d_rotary_pos_embed.__defaults__[2] == 1.0 # 1.0 # diffusers/src/diffusers/models/embeddings.py:612 in get_1d_rotary_pos_embed V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- GuardManager: 
source=G['__import_diffusers_dot_models_dot_embeddings'].get_1d_rotary_pos_embed.__defaults__[3], accessed_by=GetItemGuardAccessor(3) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- EQUALS_MATCH: G['__import_diffusers_dot_models_dot_embeddings'].get_1d_rotary_pos_embed.__defaults__[3] == 1.0 # theta = theta * ntk_factor # diffusers/src/diffusers/models/embeddings.py:610 in get_1d_rotary_pos_embed V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | +- GuardManager: source=G['__import_torch_dot_nn_dot_modules_dot_module'], accessed_by=DictGetItemGuardAccessor(__import_torch_dot_nn_dot_modules_dot_module) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | +- ID_MATCH: ___check_obj_id(G['__import_torch_dot_nn_dot_modules_dot_module'], 140585322849888) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | +- GuardManager: source=G['__import_torch_dot_nn_dot_modules_dot_module'].torch, accessed_by=GetAttrGuardAccessor(torch) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__import_torch_dot_nn_dot_modules_dot_module'].torch, 140590979095808) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- GuardManager: source=G['__import_torch_dot_nn_dot_modules_dot_module'].torch._C, accessed_by=GetAttrGuardAccessor(_C) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_torch_dot_nn_dot_modules_dot_module'].torch._C, 140590975498928) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- GuardManager: source=G['__import_torch_dot_nn_dot_modules_dot_module'].torch._C._get_tracing_state, accessed_by=GetAttrGuardAccessor(_get_tracing_state) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(G['__import_torch_dot_nn_dot_modules_dot_module'].torch._C._get_tracing_state, 140585327896000) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | +- GuardManager: source=G['__import_torch_dot_nn_dot_modules_dot_module']._global_forward_hooks, accessed_by=GetAttrGuardAccessor(_global_forward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- TYPE_MATCH: ___check_type_id(G['__import_torch_dot_nn_dot_modules_dot_module']._global_forward_hooks, 140591004471168) # or _global_forward_hooks or _global_forward_pre_hooks): # nn/modules/module.py:1561 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- DICT_LENGTH: not G['__import_torch_dot_nn_dot_modules_dot_module']._global_forward_hooks # or _global_forward_hooks or _global_forward_pre_hooks): # nn/modules/module.py:1561 in _call_impl 
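The TYPE_MATCH/DICT_LENGTH pair on _global_forward_hooks here, together with the matching pairs for the backward and pre-hook registries that follow, asserts that the global nn.Module hook dictionaries are empty. A practical consequence: registering a global hook after compilation fails these emptiness guards and forces a recompile on the next call. A minimal sketch of that mechanism (the toy module and shapes are illustrative, not the Flux model):

```python
import torch
import torch.nn as nn

class Toy(nn.Module):  # stand-in module, not the Flux transformer
    def __init__(self):
        super().__init__()
        self.proj = nn.Linear(8, 8)

    def forward(self, x):
        return self.proj(x)

compiled = torch.compile(Toy())
x = torch.randn(2, 8)
compiled(x)  # first call traces and installs guards, including
             # DICT_LENGTH: not _global_forward_hooks

# This mutates torch.nn.modules.module._global_forward_hooks, so the
# emptiness guard fails and the next call recompiles (visible with
# TORCH_LOGS="recompiles").
handle = nn.modules.module.register_module_forward_hook(
    lambda mod, args, out: None
)
compiled(x)  # guard failure -> recompile
handle.remove()
```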
V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | +- GuardManager: source=G['__import_torch_dot_nn_dot_modules_dot_module']._global_backward_hooks, accessed_by=GetAttrGuardAccessor(_global_backward_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- TYPE_MATCH: ___check_type_id(G['__import_torch_dot_nn_dot_modules_dot_module']._global_backward_hooks, 140591004471168) # or _global_backward_pre_hooks or _global_backward_hooks # nn/modules/module.py:1560 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- DICT_LENGTH: not G['__import_torch_dot_nn_dot_modules_dot_module']._global_backward_hooks # or _global_backward_pre_hooks or _global_backward_hooks # nn/modules/module.py:1560 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | +- GuardManager: source=G['__import_torch_dot_nn_dot_modules_dot_module']._global_forward_pre_hooks, accessed_by=GetAttrGuardAccessor(_global_forward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- TYPE_MATCH: ___check_type_id(G['__import_torch_dot_nn_dot_modules_dot_module']._global_forward_pre_hooks, 140591004471168) # or _global_forward_hooks or _global_forward_pre_hooks): # nn/modules/module.py:1561 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- DICT_LENGTH: not G['__import_torch_dot_nn_dot_modules_dot_module']._global_forward_pre_hooks # or _global_forward_hooks or _global_forward_pre_hooks): # nn/modules/module.py:1561 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | +- GuardManager: source=G['__import_torch_dot_nn_dot_modules_dot_module']._global_backward_pre_hooks, accessed_by=GetAttrGuardAccessor(_global_backward_pre_hooks) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- TYPE_MATCH: ___check_type_id(G['__import_torch_dot_nn_dot_modules_dot_module']._global_backward_pre_hooks, 140591004471168) # or _global_backward_pre_hooks or _global_backward_hooks # nn/modules/module.py:1560 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- DICT_LENGTH: not G['__import_torch_dot_nn_dot_modules_dot_module']._global_backward_pre_hooks # or _global_backward_pre_hooks or _global_backward_hooks # nn/modules/module.py:1560 in _call_impl V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_activations'], accessed_by=DictGetItemGuardAccessor(__import_diffusers_dot_models_dot_activations) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_activations'], 140585079141968) # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_activations'].F, accessed_by=GetAttrGuardAccessor(F) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_activations'].F, 140585319847216) # return F.gelu(gate, 
approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_activations'].F.gelu, accessed_by=GetAttrGuardAccessor(gelu) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_activations'].F.gelu, 140585328409424) # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_normalization'], accessed_by=DictGetItemGuardAccessor(__import_diffusers_dot_models_dot_normalization) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_normalization'], 140585079754240) # variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True) # diffusers/src/diffusers/models/normalization.py:427 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_normalization'].torch, accessed_by=GetAttrGuardAccessor(torch) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_normalization'].torch, 140590979095808) # variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True) # diffusers/src/diffusers/models/normalization.py:427 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_normalization'].torch.chunk, accessed_by=GetAttrGuardAccessor(chunk) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_normalization'].torch.chunk, 140590976095296) # scale, shift = torch.chunk(emb, 2, dim=1) # diffusers/src/diffusers/models/normalization.py:305 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_normalization'].torch.rsqrt, accessed_by=GetAttrGuardAccessor(rsqrt) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_normalization'].torch.rsqrt, 140590976058128) # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_normalization'].torch.float16, accessed_by=GetAttrGuardAccessor(float16) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- EQUALS_MATCH: G['__import_diffusers_dot_models_dot_normalization'].torch.float16 == torch.float16 # if self.weight.dtype in [torch.float16, torch.bfloat16]: # diffusers/src/diffusers/models/normalization.py:432 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- GuardManager: 
source=G['__import_diffusers_dot_models_dot_normalization'].torch.float32, accessed_by=GetAttrGuardAccessor(float32) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- EQUALS_MATCH: G['__import_diffusers_dot_models_dot_normalization'].torch.float32 == torch.float32 # variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True) # diffusers/src/diffusers/models/normalization.py:427 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_normalization'].torch.bfloat16, accessed_by=GetAttrGuardAccessor(bfloat16) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- EQUALS_MATCH: G['__import_diffusers_dot_models_dot_normalization'].torch.bfloat16 == torch.bfloat16 # if self.weight.dtype in [torch.float16, torch.bfloat16]: # diffusers/src/diffusers/models/normalization.py:432 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_attention_processor'], accessed_by=DictGetItemGuardAccessor(__import_diffusers_dot_models_dot_attention_processor) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_attention_processor'], 140585079143248) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_attention_processor'].F, accessed_by=GetAttrGuardAccessor(F) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_attention_processor'].F, 140585319847216) # hidden_states = F.scaled_dot_product_attention(query, key, value, dropout_p=0.0, is_causal=False) # diffusers/src/diffusers/models/attention_processor.py:1765 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_attention_processor'].F.scaled_dot_product_attention, accessed_by=GetAttrGuardAccessor(scaled_dot_product_attention) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_attention_processor'].F.scaled_dot_product_attention, 140585328298960) # hidden_states = F.scaled_dot_product_attention(query, key, value, dropout_p=0.0, is_causal=False) # diffusers/src/diffusers/models/attention_processor.py:1765 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_attention_processor'].torch, accessed_by=GetAttrGuardAccessor(torch) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_attention_processor'].torch, 140590979095808) # query = torch.cat([encoder_hidden_states_query_proj, query], dim=2) # diffusers/src/diffusers/models/attention_processor.py:1755 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- 
GuardManager: source=G['__import_diffusers_dot_models_dot_attention_processor'].torch.cat, accessed_by=GetAttrGuardAccessor(cat) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_attention_processor'].torch.cat, 140590976095136) # query = torch.cat([encoder_hidden_states_query_proj, query], dim=2) # diffusers/src/diffusers/models/attention_processor.py:1755 in __call__ V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_attention_processor'].inspect, accessed_by=GetAttrGuardAccessor(inspect) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_attention_processor'].inspect, 140590979824624) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_attention_processor'].inspect.signature, accessed_by=GetAttrGuardAccessor(signature) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_attention_processor'].inspect.signature.__code__, accessed_by=GetAttrGuardAccessor(__code__) V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_attention_processor'].inspect.signature.__code__, 140590977567008) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:37:54.938000 140590996850496 torch/_dynamo/guards.py:2148] [0/1] [__guards] V0909 14:38:41.531000 140590996850496 torch/_dynamo/guards.py:2611] [0/2] [__recompiles] Recompiling function forward in /home/sayak/diffusers/src/diffusers/models/transformers/transformer_flux.py:388 V0909 14:38:41.531000 140590996850496 torch/_dynamo/guards.py:2611] [0/2] [__recompiles] triggered by the following guard failure(s): V0909 14:38:41.531000 140590996850496 torch/_dynamo/guards.py:2611] [0/2] [__recompiles] - ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj, 140581773425056) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward W0909 14:41:06.189000 140590996850496 torch/fx/experimental/symbolic_shapes.py:4449] [0/2] xindex is not in var_ranges, defaulting to unknown range. 
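This [__recompiles] entry is the key event in the trace: the ID_MATCH guard on transformer_blocks[0].ff.net[0].proj failed because that attribute now points at a different Python object, which is exactly what happens when a LoRA adapter is loaded, fused, or swapped and the wrapped Linear is replaced (consistent with the scale_lora_layers frames guarded throughout this dump). A minimal reproduction of the mechanism, using a toy module rather than the Flux transformer:

```python
import torch
import torch.nn as nn

class Block(nn.Module):  # stand-in for transformer_blocks[0].ff.net[0]
    def __init__(self):
        super().__init__()
        self.proj = nn.Linear(16, 16)

    def forward(self, x):
        return self.proj(x)

block = Block()
compiled = torch.compile(block)
x = torch.randn(4, 16)
compiled(x)  # compile [0/0]; installs ID_MATCH (___check_obj_id) on self.proj

# Replacing the submodule changes id(block.proj), so the ID_MATCH guard
# fails and the next call recompiles ([0/1]), just like the LoRA-induced
# module swap in the log above.
block.proj = nn.Linear(16, 16)
compiled(x)
```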
V0909 14:41:40.774000 140590996850496 torch/_dynamo/guards.py:2169] [0/2] [__guards] GUARDS: V0909 14:41:40.774000 140590996850496 torch/_dynamo/guards.py:2148] [0/2] [__guards] V0909 14:41:40.774000 140590996850496 torch/_dynamo/guards.py:2148] [0/2] [__guards] TREE_GUARD_MANAGER: V0909 14:41:40.774000 140590996850496 torch/_dynamo/guards.py:2148] [0/2] [__guards] +- RootGuardManager V0909 14:41:40.774000 140590996850496 torch/_dynamo/guards.py:2148] [0/2] [__guards] | +- DEFAULT_DEVICE: utils_device.CURRENT_DEVICE == None # _dynamo/output_graph.py:460 in init_ambient_guards V0909 14:41:40.774000 140590996850496 torch/_dynamo/guards.py:2148] [0/2] [__guards] | +- GLOBAL_STATE: ___check_global_state() V0909 14:41:40.774000 140590996850496 torch/_dynamo/guards.py:2148] [0/2] [__guards] | +- GuardManager: source=L['self'], accessed_by=DictGetItemGuardAccessor(self) V0909 14:41:40.774000 140590996850496 torch/_dynamo/guards.py:2148] [0/2] [__guards] | | +- ID_MATCH: ___check_obj_id(L['self'], 140581773415408) # scale_lora_layers(self, lora_scale) # diffusers/src/diffusers/models/transformers/transformer_flux.py:436 in forward V0909 14:41:40.774000 140590996850496 torch/_dynamo/guards.py:2148] [0/2] [__guards] | | +- GuardManager: source=L['self'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:41:40.774000 140590996850496 torch/_dynamo/guards.py:2148] [0/2] [__guards] | | | +- GuardManager: source=L['self'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:41:40.774000 140590996850496 torch/_dynamo/guards.py:2148] [0/2] [__guards] | | | | +- ID_MATCH: ___check_obj_id(L['self'].training, 140591004393440) # scale_lora_layers(self, lora_scale) # diffusers/src/diffusers/models/transformers/transformer_flux.py:436 in forward W0909 14:44:58.815000 140590996850496 torch/fx/experimental/symbolic_shapes.py:4449] [0/3] xindex is not in var_ranges, defaulting to unknown range. 
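The identical ___check_obj_id failure fires again for compile id [0/3] just below, so every swap of the proj module costs another full compile and another entry in Dynamo's code cache for this frame. Two knobs that are useful when diagnosing this pattern (the limit value is an arbitrary example; set_logs is the programmatic spelling of the TORCH_LOGS artifacts that produced the [__guards]/[__recompiles] lines in this dump, available in recent PyTorch 2.x):

```python
import torch
import torch._dynamo
import torch._logging

# Each guard failure adds a cache entry for the same frame; once
# torch._dynamo.config.cache_size_limit is exceeded, Dynamo gives up
# and runs that frame eagerly. Raising it keeps more LoRA variants
# compiled at the cost of memory and compile time.
torch._dynamo.config.cache_size_limit = 16  # arbitrary example value

# Equivalent to running with TORCH_LOGS="guards,recompiles":
torch._logging.set_logs(guards=True, recompiles=True)
```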
V0909 14:42:34.069000 140590996850496 torch/_dynamo/guards.py:2611] [0/3] [__recompiles] Recompiling function forward in /home/sayak/diffusers/src/diffusers/models/transformers/transformer_flux.py:388 V0909 14:42:34.069000 140590996850496 torch/_dynamo/guards.py:2611] [0/3] [__recompiles] triggered by the following guard failure(s): V0909 14:42:34.069000 140590996850496 torch/_dynamo/guards.py:2611] [0/3] [__recompiles] - ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj, 140581773425056) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.052000 140590996850496 torch/_dynamo/guards.py:2169] [0/3] [__guards] GUARDS: V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] TREE_GUARD_MANAGER: V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] +- RootGuardManager V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | +- DEFAULT_DEVICE: utils_device.CURRENT_DEVICE == None # _dynamo/output_graph.py:460 in init_ambient_guards V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | +- GLOBAL_STATE: ___check_global_state() V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | +- GuardManager: source=L['self'], accessed_by=DictGetItemGuardAccessor(self) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | +- ID_MATCH: ___check_obj_id(L['self'], 140581773415408) # scale_lora_layers(self, lora_scale) # diffusers/src/diffusers/models/transformers/transformer_flux.py:436 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | +- GuardManager: source=L['self'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | +- GuardManager: source=L['self'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- ID_MATCH: ___check_obj_id(L['self'].training, 140591004393440) # scale_lora_layers(self, lora_scale) # diffusers/src/diffusers/models/transformers/transformer_flux.py:436 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | +- GuardManager: source=L['self']._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- GuardManager: source=L['self'].norm_out, accessed_by=DictGetItemGuardAccessor(norm_out) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(L['self'].norm_out, 140581770788240) # hidden_states = self.norm_out(hidden_states, temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:548 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=L['self'].norm_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].norm_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].norm_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].norm_out.training, 140591004393440) # hidden_states = self.norm_out(hidden_states, temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:548 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].norm_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].norm_out.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].norm_out.norm, 140581765087584) # x = self.norm(x) * (1 + scale)[:, None, :] + shift[:, None, :] # diffusers/src/diffusers/models/normalization.py:306 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].norm_out.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].norm_out.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].norm_out.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale)[:, None, :] + shift[:, None, :] # diffusers/src/diffusers/models/normalization.py:306 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].norm_out.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].norm_out.silu, 140581765087488) # emb = self.linear(self.silu(conditioning_embedding).to(x.dtype)) # diffusers/src/diffusers/models/normalization.py:304 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].norm_out.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].norm_out.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].norm_out.silu.training, 140591004393440) # emb = self.linear(self.silu(conditioning_embedding).to(x.dtype)) # diffusers/src/diffusers/models/normalization.py:304 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].norm_out.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].norm_out.linear, 140581765087536) # emb = 
self.linear(self.silu(conditioning_embedding).to(x.dtype)) # diffusers/src/diffusers/models/normalization.py:304 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].norm_out.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].norm_out.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].norm_out.linear.training, 140591004393440) # emb = self.linear(self.silu(conditioning_embedding).to(x.dtype)) # diffusers/src/diffusers/models/normalization.py:304 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].norm_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].norm_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].norm_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].norm_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- GuardManager: source=L['self'].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(L['self'].proj_out, 140581765087440) # output = self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:549 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=L['self'].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].proj_out.training, 140591004393440) # output = self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:549 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- GuardManager: source=L['self'].pos_embed, accessed_by=DictGetItemGuardAccessor(pos_embed) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(L['self'].pos_embed, 140581773415024) # image_rotary_emb = self.pos_embed(ids) # diffusers/src/diffusers/models/transformers/transformer_flux.py:469 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=L['self'].pos_embed.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].pos_embed.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].pos_embed.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].pos_embed.training, 140591004393440) # image_rotary_emb = self.pos_embed(ids) # diffusers/src/diffusers/models/transformers/transformer_flux.py:469 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].pos_embed.axes_dim, accessed_by=DictGetItemGuardAccessor(axes_dim) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].pos_embed.axes_dim, 140591004488512) # self.axes_dim[i], pos[:, i], repeat_interleave_real=True, use_real=True, freqs_dtype=freqs_dtype # diffusers/src/diffusers/models/embeddings.py:698 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- LENGTH_CHECK: len(L['self'].pos_embed.axes_dim) == 3 # self.axes_dim[i], pos[:, i], repeat_interleave_real=True, use_real=True, freqs_dtype=freqs_dtype # diffusers/src/diffusers/models/embeddings.py:698 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].pos_embed.axes_dim[0], accessed_by=TupleGetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- EQUALS_MATCH: L['self'].pos_embed.axes_dim[0] == 16 # cos, sin = get_1d_rotary_pos_embed( # diffusers/src/diffusers/models/embeddings.py:697 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].pos_embed.axes_dim[1], accessed_by=TupleGetItemGuardAccessor(1) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- EQUALS_MATCH: L['self'].pos_embed.axes_dim[1] == 56 # cos, sin = get_1d_rotary_pos_embed( # diffusers/src/diffusers/models/embeddings.py:697 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].pos_embed.axes_dim[2], accessed_by=TupleGetItemGuardAccessor(2) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- EQUALS_MATCH: L['self'].pos_embed.axes_dim[2] == 56 # cos, sin = get_1d_rotary_pos_embed( # diffusers/src/diffusers/models/embeddings.py:697 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].pos_embed._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].pos_embed._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: 
source=L['self'].pos_embed._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].pos_embed._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- GuardManager: source=L['self'].x_embedder, accessed_by=DictGetItemGuardAccessor(x_embedder) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(L['self'].x_embedder, 140581773423280) # hidden_states = self.x_embedder(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:442 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=L['self'].x_embedder.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].x_embedder.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].x_embedder.training, 140591004393440) # hidden_states = self.x_embedder(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:442 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- GuardManager: source=L['self'].time_text_embed, accessed_by=DictGetItemGuardAccessor(time_text_embed) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed, 140581773422416) # else self.time_text_embed(timestep, guidance, pooled_projections) # diffusers/src/diffusers/models/transformers/transformer_flux.py:452 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=L['self'].time_text_embed.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].time_text_embed.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].time_text_embed.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.training, 140591004393440) # else self.time_text_embed(timestep, guidance, pooled_projections) # diffusers/src/diffusers/models/transformers/transformer_flux.py:452 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].time_text_embed._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].time_text_embed.time_proj, accessed_by=DictGetItemGuardAccessor(time_proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] 
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.time_proj, 140581773415216) # timesteps_proj = self.time_proj(timestep) # diffusers/src/diffusers/models/embeddings.py:1059 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.time_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].time_text_embed.time_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.time_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.time_proj.training, 140591004393440) # timesteps_proj = self.time_proj(timestep) # diffusers/src/diffusers/models/embeddings.py:1059 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.time_proj.scale, accessed_by=DictGetItemGuardAccessor(scale) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- EQUALS_MATCH: L['self'].time_text_embed.time_proj.scale == 1 # scale=self.scale, # diffusers/src/diffusers/models/embeddings.py:769 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.time_proj.num_channels, accessed_by=DictGetItemGuardAccessor(num_channels) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- EQUALS_MATCH: L['self'].time_text_embed.time_proj.num_channels == 256 # self.num_channels, # diffusers/src/diffusers/models/embeddings.py:766 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.time_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.time_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.time_proj.flip_sin_to_cos, accessed_by=DictGetItemGuardAccessor(flip_sin_to_cos) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.time_proj.flip_sin_to_cos, 140591004393408) # flip_sin_to_cos=self.flip_sin_to_cos, # diffusers/src/diffusers/models/embeddings.py:767 in 
forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.time_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.time_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.time_proj.downscale_freq_shift, accessed_by=DictGetItemGuardAccessor(downscale_freq_shift) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- EQUALS_MATCH: L['self'].time_text_embed.time_proj.downscale_freq_shift == 0 # downscale_freq_shift=self.downscale_freq_shift, # diffusers/src/diffusers/models/embeddings.py:768 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder, accessed_by=DictGetItemGuardAccessor(text_embedder) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.text_embedder, 140581773415120) # pooled_projections = self.text_embedder(pooled_projection) # diffusers/src/diffusers/models/embeddings.py:1067 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].time_text_embed.text_embedder.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.text_embedder.training, 140591004393440) # pooled_projections = self.text_embedder(pooled_projection) # diffusers/src/diffusers/models/embeddings.py:1067 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder.act_1, accessed_by=DictGetItemGuardAccessor(act_1) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.text_embedder.act_1, 140581773423184) # hidden_states = self.act_1(hidden_states) # diffusers/src/diffusers/models/embeddings.py:1511 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: 
source=L['self'].time_text_embed.text_embedder.act_1.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder.act_1.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.text_embedder.act_1.training, 140591004393440) # hidden_states = self.act_1(hidden_states) # diffusers/src/diffusers/models/embeddings.py:1511 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder.linear_1, accessed_by=DictGetItemGuardAccessor(linear_1) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.text_embedder.linear_1, 140581773422944) # hidden_states = self.linear_1(caption) # diffusers/src/diffusers/models/embeddings.py:1510 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder.linear_1.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder.linear_1.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.text_embedder.linear_1.training, 140591004393440) # hidden_states = self.linear_1(caption) # diffusers/src/diffusers/models/embeddings.py:1510 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder.linear_2, accessed_by=DictGetItemGuardAccessor(linear_2) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.text_embedder.linear_2, 140581773423232) # hidden_states = self.linear_2(hidden_states) # diffusers/src/diffusers/models/embeddings.py:1512 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder.linear_2.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder.linear_2.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.text_embedder.linear_2.training, 140591004393440) # hidden_states = self.linear_2(hidden_states) # diffusers/src/diffusers/models/embeddings.py:1512 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) 
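Note the mix of guard kinds in the time_text_embed subtree above: plain Python attributes read inside forward (scale == 1, num_channels == 256, the axes_dim tuple earlier) get EQUALS_MATCH value guards, submodules get ID_MATCH identity guards, and the per-module hook dicts get their own managers. Mutating any value-guarded attribute therefore triggers a recompile just as surely as swapping a module. A small sketch of the value-guard case (toy module, illustrative only):

```python
import torch
import torch.nn as nn

class Emb(nn.Module):  # stand-in for the Timesteps projection module
    def __init__(self):
        super().__init__()
        self.scale = 1  # becomes EQUALS_MATCH: L['self'].scale == 1

    def forward(self, x):
        return x * self.scale

emb = Emb()
compiled = torch.compile(emb)
x = torch.randn(3)
compiled(x)     # compiles; self.scale is baked in as a constant
emb.scale = 2   # value change -> EQUALS_MATCH guard failure
compiled(x)     # recompile with the new constant
```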
| | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.text_embedder._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder, accessed_by=DictGetItemGuardAccessor(guidance_embedder)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.guidance_embedder, 140581773422512) # guidance_emb = self.guidance_embedder(guidance_proj.to(dtype=pooled_projection.dtype)) # (N, D) # diffusers/src/diffusers/models/embeddings.py:1063 in forward
| | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].time_text_embed.guidance_embedder.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.guidance_embedder.training, 140591004393440) # guidance_emb = self.guidance_embedder(guidance_proj.to(dtype=pooled_projection.dtype)) # (N, D) # diffusers/src/diffusers/models/embeddings.py:1063 in forward
| | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.act, accessed_by=DictGetItemGuardAccessor(act)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.guidance_embedder.act, 140585079194528) # if self.act is not None: # diffusers/src/diffusers/models/embeddings.py:745 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.act.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.act.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.guidance_embedder.act.training, 140591004393440) # if self.act is not None: # diffusers/src/diffusers/models/embeddings.py:745 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.linear_1, accessed_by=DictGetItemGuardAccessor(linear_1)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.guidance_embedder.linear_1, 140581773422224) # sample = self.linear_1(sample) # diffusers/src/diffusers/models/embeddings.py:743 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.linear_1.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.linear_1.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.guidance_embedder.linear_1.training, 140591004393440) # sample = self.linear_1(sample) # diffusers/src/diffusers/models/embeddings.py:743 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.linear_2, accessed_by=DictGetItemGuardAccessor(linear_2)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.guidance_embedder.linear_2, 140581773421840) # sample = self.linear_2(sample) # diffusers/src/diffusers/models/embeddings.py:748 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.linear_2.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.linear_2.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.guidance_embedder.linear_2.training, 140591004393440) # sample = self.linear_2(sample) # diffusers/src/diffusers/models/embeddings.py:748 in forward
| | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.post_act, accessed_by=DictGetItemGuardAccessor(post_act)
| | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.guidance_embedder.post_act, 140591004478624) # if self.post_act is not None: # diffusers/src/diffusers/models/embeddings.py:750 in forward
| | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.forward, accessed_by=GetAttrGuardAccessor(forward)
| | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.forward, accessed_by=FuncDefaultsGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.guidance_embedder.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.guidance_embedder.forward.__defaults__[0], 140591004478624) # if condition is not None: # diffusers/src/diffusers/models/embeddings.py:741 in forward
| | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder, accessed_by=DictGetItemGuardAccessor(timestep_embedder)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.timestep_embedder, 140581773415072) # timesteps_emb = self.timestep_embedder(timesteps_proj.to(dtype=pooled_projection.dtype)) # (N, D) # diffusers/src/diffusers/models/embeddings.py:1060 in forward
| | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].time_text_embed.timestep_embedder.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.timestep_embedder.training, 140591004393440) # timesteps_emb = self.timestep_embedder(timesteps_proj.to(dtype=pooled_projection.dtype)) # (N, D) # diffusers/src/diffusers/models/embeddings.py:1060 in forward
| | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.act, accessed_by=DictGetItemGuardAccessor(act)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.timestep_embedder.act, 140585079194528) # if self.act is not None: # diffusers/src/diffusers/models/embeddings.py:745 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.act.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.act.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.timestep_embedder.act.training, 140591004393440) # if self.act is not None: # diffusers/src/diffusers/models/embeddings.py:745 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.linear_1, accessed_by=DictGetItemGuardAccessor(linear_1)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.timestep_embedder.linear_1, 140581773422992) # sample = self.linear_1(sample) # diffusers/src/diffusers/models/embeddings.py:743 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.linear_1.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.linear_1.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.timestep_embedder.linear_1.training, 140591004393440) # sample = self.linear_1(sample) # diffusers/src/diffusers/models/embeddings.py:743 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.linear_2, accessed_by=DictGetItemGuardAccessor(linear_2)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.timestep_embedder.linear_2, 140581773422848) # sample = self.linear_2(sample) # diffusers/src/diffusers/models/embeddings.py:748 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.linear_2.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.linear_2.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.timestep_embedder.linear_2.training, 140591004393440) # sample = self.linear_2(sample) # diffusers/src/diffusers/models/embeddings.py:748 in forward
| | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.post_act, accessed_by=DictGetItemGuardAccessor(post_act)
| | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.timestep_embedder.post_act, 140591004478624) # if self.post_act is not None: # diffusers/src/diffusers/models/embeddings.py:750 in forward
| | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.forward, accessed_by=GetAttrGuardAccessor(forward)
| | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.forward, accessed_by=FuncDefaultsGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].time_text_embed.timestep_embedder.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].time_text_embed.timestep_embedder.forward.__defaults__[0], 140591004478624) # if condition is not None: # diffusers/src/diffusers/models/embeddings.py:741 in forward
| | | | | | +- GuardManager: source=L['self'].time_text_embed._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | +- GuardManager: source=L['self'].time_text_embed._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | +- GuardManager: source=L['self'].time_text_embed._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | +- GuardManager: source=L['self'].time_text_embed._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
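That closes the time_text_embed subtree: the text_embedder hook entries, then matching guidance_embedder and timestep_embedder subtrees. Both embedders are guarded through the same source lines (diffusers/src/diffusers/models/embeddings.py:741-750), which outline the module's control flow; the repeated object id 140591004478624 on post_act and on forward.__defaults__[0] is consistent with both being None. A minimal sketch of that control flow, reconstructed only from the quoted lines (the class name, constructor arguments, and the SiLU choice for act are illustrative assumptions, not the diffusers source):

import torch.nn as nn

class TimestepEmbeddingSketch(nn.Module):
    # Reconstruction of the guarded structure: linear_1 -> act -> linear_2,
    # with optional condition/post_act branches that appear to be None in this trace.
    def __init__(self, in_channels: int, time_embed_dim: int):
        super().__init__()
        self.linear_1 = nn.Linear(in_channels, time_embed_dim)     # guarded: ...linear_1
        self.act = nn.SiLU()                                       # guard only checks identity
        self.linear_2 = nn.Linear(time_embed_dim, time_embed_dim)  # guarded: ...linear_2
        self.post_act = None                                       # guard pins this object id
        self.cond_proj = None                                      # assumption: unused here

    def forward(self, sample, condition=None):  # default guarded via FuncDefaultsGuardAccessor
        if condition is not None:               # embeddings.py:741
            sample = sample + self.cond_proj(condition)
        sample = self.linear_1(sample)          # embeddings.py:743
        if self.act is not None:                # embeddings.py:745
            sample = self.act(sample)
        sample = self.linear_2(sample)          # embeddings.py:748
        if self.post_act is not None:           # embeddings.py:750
            sample = self.post_act(sample)
        return sample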
| | | | +- GuardManager: source=L['self'].context_embedder, accessed_by=DictGetItemGuardAccessor(context_embedder)
| | | | | +- ID_MATCH: ___check_obj_id(L['self'].context_embedder, 140581773423136) # encoder_hidden_states = self.context_embedder(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:454 in forward
| | | | | +- GuardManager: source=L['self'].context_embedder.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | +- GuardManager: source=L['self'].context_embedder.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].context_embedder.training, 140591004393440) # encoder_hidden_states = self.context_embedder(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:454 in forward
| | | | +- GuardManager: source=L['self'].transformer_blocks, accessed_by=DictGetItemGuardAccessor(transformer_blocks)
| | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks, 140581773423376) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
| | | | | +- GuardManager: source=L['self'].transformer_blocks.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | +- GuardManager: source=L['self'].transformer_blocks.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks.training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
| | | | | +- GuardManager: source=L['self'].transformer_blocks[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0], 140581773423328) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff, accessed_by=DictGetItemGuardAccessor(ff)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff, 140581773424768) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net, 140581773425008) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[0].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0], 140581773424960) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj, 140537204952944) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].ff.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj.lora_A, 140537204953136) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj.lora_A['default_0'], 140537204949680) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj.lora_A['default_0'].weight, 140537312282640) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj.lora_B, 140537204952608) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj.lora_B['default_0'], 140537204949344) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj.base_layer, 140581773425056) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj.lora_dropout, 140537204952368) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj.lora_dropout['default_0'], 140537204949728) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].ff.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].ff.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].ff.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].ff.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].ff.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].ff.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[0].ff.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
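The block of guards just above recurs for every LoRA-wrapped Linear in the model: identity checks on lora_A/lora_B/lora_dropout and their single 'default_0' entries, TYPE_MATCH plus DICT_LENGTH on the scaling and use_dora dicts, an empty merged_adapters list, a pinned _disable_adapters flag, and an _active_adapter object aliased across layers. They map one-to-one onto the peft forward path quoted in the comments (peft/tuners/lora/layer.py:557-568). A simplified sketch of that path, assuming a single active adapter named 'default_0' and omitting the merged/disabled and DoRA branches (not the verbatim peft code):

def lora_linear_forward(self, x, *args, **kwargs):
    # Each attribute access below corresponds to one guard subtree above.
    result = self.base_layer(x, *args, **kwargs)     # layer.py:557
    for active_adapter in self.active_adapters:      # the aliased _active_adapter list
        if active_adapter not in self.lora_A.keys(): # layer.py:560
            continue
        lora_A = self.lora_A[active_adapter]         # layer.py:562
        lora_B = self.lora_B[active_adapter]         # layer.py:563
        dropout = self.lora_dropout[active_adapter]  # layer.py:564
        scaling = self.scaling[active_adapter]       # layer.py:565, EQUALS_MATCH == 1.0
        x = x.to(lora_A.weight.dtype)                # layer.py:566
        if not self.use_dora[active_adapter]:        # layer.py:568
            result = result + lora_B(lora_A(dropout(x))) * scaling
    return result

Because scaling['default_0'] is guarded with EQUALS_MATCH against the constant 1.0, changing the effective LoRA scale at runtime would be expected to invalidate this guard and force a recompile.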
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
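With net[0] fully guarded (its proj above, plus the EQUALS_MATCH pinning approximate to 'tanh' at activations.py:83), and with the guards on net[1] and net[2] that follow, ff.net is checked as a three-element ModuleList (the LENGTH_CHECK earlier): a projection-plus-tanh-GELU gate, a middle module carrying only a training guard, and a final LoRA-wrapped Linear. A sketch of a feed-forward with that shape; the Dropout in the middle slot is an assumption consistent with its minimal guards, and none of this is the diffusers source:

import torch.nn as nn
import torch.nn.functional as F

class GELUGate(nn.Module):
    # net[0]: project, then tanh-approximated GELU (activations.py:83/88).
    def __init__(self, dim: int, inner_dim: int, approximate: str = "tanh"):
        super().__init__()
        self.proj = nn.Linear(dim, inner_dim)  # LoRA-wrapped in the guarded model
        self.approximate = approximate         # EQUALS_MATCH: == 'tanh'

    def forward(self, hidden_states):
        hidden_states = self.proj(hidden_states)                    # activations.py:88
        return F.gelu(hidden_states, approximate=self.approximate)  # activations.py:83

def make_ff(dim: int, inner_dim: int, dropout: float = 0.0) -> nn.ModuleList:
    # LENGTH_CHECK above: len(ff.net) == 3
    return nn.ModuleList([GELUGate(dim, inner_dim), nn.Dropout(dropout), nn.Linear(inner_dim, dim)])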
source=L['self'].transformer_blocks[0].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[2], 140537204956256) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].ff.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[2].lora_A, 140537204955488) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[2].lora_A['default_0'], 140537204964896) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[2].lora_A['default_0'].weight, 140537312283760) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[2].lora_B, 140537204957360) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[2].lora_B['default_0'], 140537204961440) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[2].base_layer, 140581773425152) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[2].lora_dropout, 140537204950880) # dropout = 
self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[2].lora_dropout['default_0'], 140537204956064) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].ff.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].ff.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].ff.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].ff.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].ff.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].ff.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[0].ff.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn, 140581773423952) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_k, 140526788991184) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_k.lora_A, 140526788997616) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_k.lora_A['default_0'], 140526788995600) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_k.lora_A['default_0'].weight, 140537312638064) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_k.lora_B, 140526788990608) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_k.lora_B['default_0'], 140526788995648) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_k.base_layer, 140581773424144) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_k.lora_dropout, 140526788988496) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_k.lora_dropout['default_0'], 140526788995504) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[0].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_q, 140526788985952) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_q.lora_A, 140526788986768) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_q.lora_A['default_0'], 140526788984896) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_q.lora_A['default_0'].weight, 140526373432560) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_q.lora_B, 140526788991712) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_q.lora_B['default_0'], 140526788985232) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_q.base_layer, 140581773424240) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_q.lora_dropout, 140526788990944) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_q.lora_dropout['default_0'], 140526788986816) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[0].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_v, 140526788995840) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_v.lora_A, 140526789000832) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_v.lora_A['default_0'], 140526788999824) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_v.lora_A['default_0'].weight, 140537312629104) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_v.lora_B, 140526788996224) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_v.lora_B['default_0'], 140526788994928) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_v.base_layer, 140581773424336) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_v.lora_dropout, 140526789000160) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_v.lora_dropout['default_0'], 140526788997184) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[0].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.norm_k, 140581773424192) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.norm_k, 140581773424192) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.norm_k.weight, 140581772779872) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.norm_q, 140581773424096) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.norm_q.weight, 140581906594960) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
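For the qk-norm layers the guard set is much smaller: an ID_MATCH on the module and on its weight parameter, an EQUALS_MATCH on eps == 1e-06, and empty hook-dict GuardManager entries, because the traced forward touches only those attributes (normalization.py:428 and :430 as quoted). A sketch of that RMSNorm-style forward; the two quoted lines are as in the log, while the variance line is an assumption filled in from the standard formulation:

import torch
import torch.nn as nn

class RMSNormSketch(nn.Module):
    # Illustrative stand-in for attn.norm_q / attn.norm_k.
    def __init__(self, dim, eps=1e-6, elementwise_affine=True):
        super().__init__()
        self.eps = eps  # -> EQUALS_MATCH: eps == 1e-06
        self.weight = nn.Parameter(torch.ones(dim)) if elementwise_affine else None

    def forward(self, hidden_states):
        variance = hidden_states.pow(2).mean(-1, keepdim=True)            # assumed
        hidden_states = hidden_states * torch.rsqrt(variance + self.eps)  # normalization.py:428
        if self.weight is not None:                                       # normalization.py:430 -> ID_MATCH on weight
            hidden_states = hidden_states * self.weight
        return hidden_states

out = RMSNormSketch(8)(torch.randn(2, 4, 8))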
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out, 140581773424528) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[0], 140526788993584) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].attn.to_out[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[0].training, 140591004393408) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[0].lora_A, 140526788995024) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[0].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[0].lora_A['default_0'], 140526788985616) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[0].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[0].lora_A['default_0'].weight, 140526697167856) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[0].lora_B, 140526788998864) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[0].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[0].lora_B['default_0'], 140526788986432) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[0].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[0].base_layer, 140581773424576) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[0].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[0].lora_dropout, 140526788997568) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[0].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[0].lora_dropout['default_0'], 140526788999728) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[0].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.to_out[0].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].attn.to_out[0].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].attn.to_out[0].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.to_out[0].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].attn.to_out[0].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[0].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.to_out[0].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[0].attn.to_out[0].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[0]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[0]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[1], 140581773424624) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
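attn.to_out is an nn.ModuleList holding the output projection and its dropout, so its children are reached positionally through GetItemGuardAccessor(0)/(1) rather than by attribute name; to_out[0] repeats the full LoRA guard set seen for to_v above, while to_out[1] (the dropout) only needs its training flag pinned. A minimal repro of that access pattern, under the assumption that it is compiled with the TORCH_LOGS="guards" environment setting so the accessors become visible:

import torch
import torch.nn as nn

class OutProjSketch(nn.Module):
    def __init__(self, dim=8):
        super().__init__()
        self.to_out = nn.ModuleList([nn.Linear(dim, dim), nn.Dropout(0.0)])

    def forward(self, hidden_states):
        hidden_states = self.to_out[0](hidden_states)  # attention_processor.py:1776
        hidden_states = self.to_out[1](hidden_states)  # attention_processor.py:1778
        return hidden_states

compiled = torch.compile(OutProjSketch())
compiled(torch.randn(2, 8))  # run with TORCH_LOGS="guards" to see GetItemGuardAccessor(0)/(1)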
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_k_proj, 140526788988208) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].attn.add_k_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_k_proj.training, 140591004393408) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_k_proj.lora_A, 140526788991376) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_k_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_k_proj.lora_A['default_0'], 140526788992528) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_k_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_k_proj.lora_A['default_0'].weight, 140537312628544) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_k_proj.lora_B, 140526788998288) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_k_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_k_proj.lora_B['default_0'], 140526788988352) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_k_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_k_proj.base_layer, 140581773424384) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_k_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_k_proj.lora_dropout, 140526788998192) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_k_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_k_proj.lora_dropout['default_0'], 140526788992480) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_k_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.add_k_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].attn.add_k_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].attn.add_k_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.add_k_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].attn.add_k_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_k_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.add_k_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[0].attn.add_k_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_k_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_k_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
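The added-stream projection add_k_proj carries the same LoRA guard block again and, like every wrapped layer, closes with a doubled TENSOR_ALIASING check tying its _active_adapter to norm1.linear._active_adapter. Despite the name, the guard here asserts identity of the shared adapter-name list rather than of a tensor: peft keeps one list and every wrapped layer holds a reference to it, so a single `is` check per layer stands in for re-validating the list contents. A toy illustration of the invariant being asserted (class and variable names are hypothetical):

class PeftLayerState:
    # Hypothetical stand-in for the _active_adapter bookkeeping on each wrapped layer.
    def __init__(self, active_adapter):
        self._active_adapter = active_adapter

shared = ["default_0"]                 # one list, shared by every LoRA layer
norm1_linear = PeftLayerState(shared)
add_k_proj = PeftLayerState(shared)

# What the TENSOR_ALIASING guard evaluates at call time:
assert norm1_linear._active_adapter is add_k_proj._active_adapter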
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_q_proj, 140526788997280) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].attn.add_q_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_q_proj.training, 140591004393408) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_q_proj.lora_A, 140526788997856) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_q_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_q_proj.lora_A['default_0'], 140526788997904) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_q_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_q_proj.lora_A['default_0'].weight, 140526697157696) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_q_proj.lora_B, 140526788989264) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_q_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_q_proj.lora_B['default_0'], 140526788995792) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_q_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_q_proj.base_layer, 140581773424480) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_q_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_q_proj.lora_dropout, 140526788998720) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_q_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_q_proj.lora_dropout['default_0'], 140526788990752) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_q_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.add_q_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].attn.add_q_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].attn.add_q_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.add_q_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].attn.add_q_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_q_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.add_q_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | |
| | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[0].attn.add_q_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_q_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_q_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_v_proj, 140526788992336) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].attn.add_v_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[0].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_v_proj.training, 140591004393408) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_v_proj.lora_A, 140526788992624) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_v_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_v_proj.lora_A['default_0'], 140526788991424) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_v_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_v_proj.lora_A['default_0'].weight, 140526697160176) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_v_proj.lora_B, 140526788999584) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_v_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_v_proj.lora_B['default_0'], 140526788997808) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_v_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # 
peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_v_proj.base_layer, 140581773424432) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_v_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_v_proj.lora_dropout, 140526788989360) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_v_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_v_proj.lora_dropout['default_0'], 140526788991280) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[0].attn.add_v_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_v_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.add_v_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].attn.add_v_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].attn.add_v_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.add_v_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].attn.add_v_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_v_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.add_v_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[0].attn.add_v_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.add_v_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.add_v_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out, 
accessed_by=DictGetItemGuardAccessor(to_add_out) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_add_out, 140526788985472) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].attn.to_add_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_add_out.training, 140591004393408) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_add_out.lora_A, 140526788986144) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_add_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[0].attn.to_add_out.lora_A['default_0'], 140526788991472) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_add_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_add_out.lora_A['default_0'].weight, 140537315368272) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_add_out.lora_B, 140526788999920) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_add_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_add_out.lora_B['default_0'], 140526788987392) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_add_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_add_out.base_layer, 140581773424672) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_add_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_add_out.lora_dropout, 140526788986096) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.lora_dropout.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_add_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_add_out.lora_dropout['default_0'], 140526788987200) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_add_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.to_add_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].attn.to_add_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].attn.to_add_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | 
| +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.to_add_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].attn.to_add_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_add_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.to_add_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[0].attn.to_add_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.to_add_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.to_add_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.norm_added_k, 140581773424816) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[0].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.norm_added_k.weight, 140581766060672) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.norm_added_q, 140581773424720) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: 
L['self'].transformer_blocks[0].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.norm_added_q.weight, 140581765982592) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].attn.processor, 140581773423904) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[0].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1, accessed_by=DictGetItemGuardAccessor(norm1) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1, 140581773423472) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.norm, 140581773423664) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.silu, 140581773423568) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.linear, 140526788986576) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].norm1.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.linear.lora_A, 140526788999536) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.linear.lora_A['default_0'], 140526788987344) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.linear.lora_A['default_0'].weight, 140526555867760) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.linear.lora_B, 140526788987776) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.linear.lora_B['default_0'], 140526788987632) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.linear.base_layer, 140581773423616) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.linear.lora_dropout, 140526788996416) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.linear.lora_dropout['default_0'], 140526788996752) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].norm1.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].norm1.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].norm1.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].norm1.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].norm1.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
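The lora_A/lora_B/base_layer/lora_dropout/scaling/use_dora guards above all point into PEFT's LoRA Linear forward; every guard comment quotes peft/tuners/lora/layer.py:557-568. A sketch of that hot path, with the commented lines taken verbatim from the guard annotations and the final accumulation line filled in as the standard LoRA update (an assumption; that line is not quoted in this log):

    # Sketch of peft/tuners/lora/layer.py:557-568 as quoted by the guards above.
    # Every attribute read here gets its own ID_MATCH/TYPE_MATCH/EQUALS_MATCH
    # guard, which is why the tree is this deep for a single Linear.
    def forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)       # layer.py:557
        for active_adapter in self.active_adapters:        # layer.py:559
            if active_adapter not in self.lora_A.keys():   # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]           # layer.py:562
            lora_B = self.lora_B[active_adapter]           # layer.py:563
            dropout = self.lora_dropout[active_adapter]    # layer.py:564
            scaling = self.scaling[active_adapter]         # layer.py:565
            x = x.to(lora_A.weight.dtype)                  # layer.py:566
            if not self.use_dora[active_adapter]:          # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling  # assumed standard LoRA update
        return result

Note the EQUALS_MATCH above: the concrete float scaling['default_0'] == 1.0 is baked into the guard set, so a run with a different lora_scale fails this guard and triggers a recompile.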
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].norm1.linear._active_adapter, 140591004458752) # for active_adapter in self.active_adapters: # peft/tuners/lora/layer.py:559 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[0].norm1.linear._active_adapter) == 1 # for active_adapter in self.active_adapters: # peft/tuners/lora/layer.py:559 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
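The TENSOR_ALIASING entries record object identity: per the guards, every LoRA-wrapped module in the model holds the very same _active_adapter object (the active_adapter property quoted at peft/tuners/tuners_utils.py:516 just returns self._active_adapter, and the TYPE_MATCH/LENGTH_CHECK above show it is a one-element container here). Dynamo asserts that identity pairwise against each module it traced, which produces the long run snipped just below. A hypothetical stand-alone illustration of the sharing (stand-in names, not PEFT code):

    # Many modules referencing the SAME list is what makes Dynamo emit one
    # identity ("is") guard per observed pair -- O(#LoRA modules) guards total.
    shared = ["default_0"]  # stand-in for the single shared _active_adapter
    class LoraModule:
        def __init__(self, active):
            self._active_adapter = active  # every instance aliases one list
    modules = [LoraModule(shared) for _ in range(19 * 4)]  # ~blocks[0..18] x (to_q, to_k, to_v, ff.net[2])
    assert all(m._active_adapter is modules[0]._active_adapter for m in modules)  # the 'is' each guard checks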
[... snipped: each TENSOR_ALIASING guard in this run is emitted twice in the original log, and the same pair of guards repeats for attn.to_k, attn.to_q, attn.to_v, and ff.net[2] of transformer_blocks[1] through transformer_blocks[18], then for transformer_blocks[1].norm1.linear ...]
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING:
L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].norm1.linear._active_adapter # return self._active_adapter 
# peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | 
| | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].transformer_blocks[17].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in 
active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- 
TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].transformer_blocks[7].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in 
active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[0].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[0].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[0].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[0].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[1].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[1].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[1].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[1].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[2].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | 
| +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[2].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[2].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[2].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[3].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[3].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[3].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[3].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[4].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[4].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[4].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].single_transformer_blocks[4].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[5].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[5].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[5].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[5].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[6].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[6].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[6].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[6].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[7].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[7].proj_mlp._active_adapter # return self._active_adapter # 
peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[7].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[7].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[8].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[8].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[8].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[8].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].transformer_blocks[10].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.to_out[0]._active_adapter # return self._active_adapter # 
peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[0].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[0].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[0].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[10].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[10].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[11].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[11].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[12].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[12].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[13].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[13].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[14].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[14].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[15].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[15].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[16].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[16].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[17].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[17].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[18].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[18].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[19].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[19].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[1].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[1].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[1].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[20].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[20].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[21].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[21].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[22].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[22].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[23].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[23].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[24].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[24].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[25].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[25].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[26].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[26].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[27].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[27].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[28].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[28].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[29].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[29].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[2].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[2].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[2].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[30].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[30].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[31].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[31].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[32].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[32].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[33].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[33].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[34].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[34].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[35].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[35].proj_out._active_adapter # return self._active_adapter #
peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[35].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[36].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[36].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[36].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[36].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[37].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[37].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[37].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[37].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[3].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[3].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[3].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[3].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[3].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[3].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[4].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[4].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[4].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[4].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[4].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: 
L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[4].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[5].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[5].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[5].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[5].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[5].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[5].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[6].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[6].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[6].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].single_transformer_blocks[6].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[6].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[6].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[7].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[7].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[7].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[7].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[7].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[7].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[8].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[8].attn.to_k._active_adapter # return self._active_adapter # 
peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[8].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[8].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[8].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[8].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: 
L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].transformer_blocks[12].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.to_add_out._active_adapter # return self._active_adapter # 
peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: 
L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].transformer_blocks[17].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.to_add_out._active_adapter # return self._active_adapter # 
peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[10].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[10].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[10].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[10].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[10].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[10].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[11].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[11].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[11].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[11].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[11].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[12].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[12].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[12].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[13].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[13].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[13].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[14].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[14].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[14].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[15].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[15].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[15].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[16].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[16].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[16].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[17].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[17].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[17].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[18].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[18].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[18].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[19].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[19].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[19].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[20].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[20].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[20].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[21].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[21].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[21].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[22].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[22].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[22].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[23].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[23].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[23].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[24].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[24].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[24].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[25].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[25].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[25].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[26].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[26].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[26].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[27].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[27].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[27].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[28].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[28].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[28].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[29].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[29].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[29].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[30].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[30].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[30].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[31].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[31].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[31].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[32].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[32].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[32].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[33].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[33].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[33].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[34].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[34].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[34].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[35].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[35].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[35].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[36].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[36].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[36].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[37].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[37].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[37].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[0].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[1].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[2].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[3].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[4].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[5].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[6].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[7].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[8].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- 
TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[10].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[10].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[11].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[11].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[12].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[12].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[13].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[13].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[14].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].single_transformer_blocks[14].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[15].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[15].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[16].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[16].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[17].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[17].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[18].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[18].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[19].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[19].norm.linear._active_adapter # return 
self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[20].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[20].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[21].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[21].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[22].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[22].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[23].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[23].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[24].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[24].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[25].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[25].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[26].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[26].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[27].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[27].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[28].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[28].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[29].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[29].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | 
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[30].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[30].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[31].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[31].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[32].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[32].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[33].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[33].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[34].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[34].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: 
L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[35].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[35].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[36].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[36].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[37].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[37].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].transformer_blocks[2].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].norm1_context.linear._active_adapter # 
return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in 
active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | 
| | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: 
L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: 
L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: 
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear._active_adapter[0], accessed_by=ListGetItemGuardAccessor(0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].norm1.linear._active_adapter[0] == 'default_0' # for active_adapter in self.active_adapters: # peft/tuners/lora/layer.py:559 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].norm1.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[0].norm1.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
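
The guards just above freeze the PEFT adapter state observed at trace time: `_active_adapter[0]` must still equal 'default_0', `merged_adapters` must still be an empty list, and `_disable_adapters` must still be the same singleton (140591004393440 is consistent with the id of `False`). Calling something like `set_adapter`, `merge`, or `disable_adapters` on the model would break one of these checks and force a recompile. Roughly, in plain Python (stand-in object; not the actual Dynamo guard implementation):

    # Hedged sketch: the three adapter-state checks spelled out as
    # ordinary Python against a hypothetical stand-in layer. Attribute
    # names are taken from the log; the class itself is illustrative.
    def adapter_guards_still_hold(layer) -> bool:
        return (
            layer._active_adapter[0] == "default_0"  # EQUALS_MATCH
            and not layer.merged_adapters            # TYPE_MATCH + LENGTH_CHECK: empty list
            and layer._disable_adapters is False     # ID_MATCH against the False singleton
        )

    class StandInLoraLayer:
        _active_adapter = ["default_0"]
        merged_adapters = []
        _disable_adapters = False

    assert adapter_guards_still_hold(StandInLoraLayer())
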
source=L['self'].transformer_blocks[0].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm2, accessed_by=DictGetItemGuardAccessor(norm2) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm2, 140581773424864) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm2.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context, 140581773425200) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context._modules, 
accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net, 140581773425344) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[0].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0], 140581773425296) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward 
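For orientation: nearly every guard comment in this stretch points back at the same few lines of peft/tuners/lora/layer.py (557-568). Below is a minimal, self-contained sketch of that forward path, reconstructed only from the source snippets quoted in the log; the non-DoRA branch shown is the standard LoRA update, and the actual peft implementation may differ in detail.

import torch
from torch import nn

class LoraLinearSketch(nn.Module):
    # Sketch of peft's lora.Linear, reconstructed from the comments quoted in
    # the guard dump (peft/tuners/lora/layer.py:557-568). Not verbatim peft code.
    def __init__(self, base: nn.Linear, r: int = 4):
        super().__init__()
        self.base_layer = base
        self.active_adapters = ["default_0"]             # -> EQUALS_MATCH 'default_0'
        self.lora_A = nn.ModuleDict({"default_0": nn.Linear(base.in_features, r, bias=False)})
        self.lora_B = nn.ModuleDict({"default_0": nn.Linear(r, base.out_features, bias=False)})
        self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})
        self.scaling = {"default_0": 1.0}                # -> EQUALS_MATCH == 1.0
        self.use_dora = {"default_0": False}             # -> ID_MATCH on the bool singleton

    def forward(self, x):
        result = self.base_layer(x)                      # layer.py:557 -> ID_MATCH on base_layer
        for active_adapter in self.active_adapters:      # layer.py:559
            if active_adapter not in self.lora_A.keys(): # layer.py:560 -> ID_MATCH on lora_A dict
                continue
            lora_A = self.lora_A[active_adapter]         # layer.py:562
            lora_B = self.lora_B[active_adapter]         # layer.py:563
            dropout = self.lora_dropout[active_adapter]  # layer.py:564
            scaling = self.scaling[active_adapter]       # layer.py:565
            x = x.to(lora_A.weight.dtype)                # layer.py:566 -> ID_MATCH on weight
            if not self.use_dora[active_adapter]:        # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling  # standard LoRA update (assumed)
        return result

Every plain-Python read in that path (the dict lookups, the scaling float, the use_dora and merged_adapters flags) becomes one guard in the tree, repeated once per LoRA-wrapped submodule, which is why the dump is this large.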
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].proj, 140537204963216) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].ff_context.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_A, 140537204959904) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_A['default_0'], 140537204954048) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_A['default_0'].weight, 140537312296720) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_B, 140537204960864) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_B['default_0'], 140537204964128) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].proj.base_layer, 140581773425392) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_dropout, 140537204961968) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_dropout['default_0'], 140537204959280) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].ff_context.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | 
| | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].ff_context.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].ff_context.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].ff_context.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].ff_context.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].ff_context.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not 
L['self'].transformer_blocks[0].ff_context.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[0].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[1], 140581773425488) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[2], 140537204959952) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].ff_context.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[0].ff_context.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[2].lora_A, 140537204959856) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[2].lora_A['default_0'], 140526788912624) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] 
[0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[2].lora_A['default_0'].weight, 140537312292720) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[2].lora_B, 140537204965280) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[2].lora_B['default_0'], 140526788912528) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[2].base_layer, 140581773425536) # result = 
self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[2].lora_dropout, 140537204962352) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[2].lora_dropout['default_0'], 140537204960432) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].ff_context.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].ff_context.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].ff_context.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].ff_context.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].ff_context.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2]._backward_hooks, 
accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].ff_context.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[0].ff_context.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].ff_context.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | 
+- GuardManager: source=L['self'].transformer_blocks[0].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context, 140581773423712) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.norm, 140581773423856) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.silu, 140581773423760) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.linear, 140526788996800) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[0].norm1_context.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.linear.lora_A, 140526788996464) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.linear.lora_A['default_0'], 140526788990464) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[0].norm1_context.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.linear.lora_A['default_0'].weight, 140526270521616) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.linear.lora_B, 140526788998960) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.linear.lora_B['default_0'], 140526788989840) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward 
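Note the EQUALS_MATCH guards on scaling['default_0'] == 1.0 throughout: the LoRA scale is specialized into the compiled graph as a constant, so mutating it afterwards (e.g. running with a different lora_scale) fails the guard and forces a recompile. A self-contained toy, not the flux/peft modules, showing the same mechanism:

import torch

class Scaled(torch.nn.Module):
    def __init__(self):
        super().__init__()
        self.scaling = {"default_0": 1.0}    # plain dict + float, like peft's scaling
        self.lin = torch.nn.Linear(4, 4)

    def forward(self, x):
        # this float read is burned into the graph and guarded with
        # EQUALS_MATCH, exactly like scaling['default_0'] == 1.0 above
        return self.lin(x) * self.scaling["default_0"]

m = Scaled()
cm = torch.compile(m)
x = torch.randn(2, 4)
cm(x)                            # first compile installs EQUALS_MATCH == 1.0
m.scaling["default_0"] = 0.5
cm(x)                            # guard fails; Dynamo recompiles this frame

The [0/3] tag on these records appears to mark the fourth compilation of frame 0, i.e. three earlier guard sets have already failed in this run.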
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.linear.base_layer, 140581773423808) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.linear.lora_dropout, 140526788990800) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.linear.lora_dropout['default_0'], 140526788993632) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].norm1_context.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].norm1_context.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[0].norm1_context.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].norm1_context.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[0].norm1_context.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # 
peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[0].norm1_context.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[0].norm1_context.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm1_context.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[0].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm2_context, 140581773424912) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[0].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=L['self'].transformer_blocks[1], accessed_by=GetItemGuardAccessor(1) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[1], 140581773423424) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff, accessed_by=DictGetItemGuardAccessor(ff) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff, 140581767528656) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net, 140581767528896) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[1].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0], 140581767528848) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[1].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].proj, 140533121125296) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].ff.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].proj.lora_A, 140533121134224) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].proj.lora_A['default_0'], 140533121132688) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].proj.lora_A['default_0'].weight, 140537313927440) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].proj.lora_B, 140533121132544) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].proj.lora_B['default_0'], 140533121132496) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].proj.base_layer, 140581767528944) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].proj.lora_dropout, 140533121122560) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.lora_dropout.__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].proj.lora_dropout['default_0'], 140533121125584) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].ff.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].ff.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].ff.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].ff.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].ff.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].ff.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[1].ff.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[1].ff.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[1], 140581767528992) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[1].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[2], 140533121128512) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].ff.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[2].lora_A, 140533121601728) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[2].lora_A['default_0'], 140533121602544) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[2].lora_A['default_0'].weight, 140537313923440) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[2].lora_B, 140533121606912) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[2].lora_B['default_0'], 140533121599424) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[2].base_layer, 140581767529040) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[2].lora_dropout, 140533121613152) # dropout = 
self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[2].lora_dropout['default_0'], 140533121612096) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].ff.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].ff.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].ff.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].ff.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].ff.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].ff.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[1].ff.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
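Every entry above, down to `_active_adapter`, specializes the LoRA state of a single feed-forward projection (`transformer_blocks[1].ff.net[2]`). The source comments all point into the adapter branch of PEFT's LoRA `Linear.forward` (peft/tuners/lora/layer.py:557-568). A minimal sketch of that branch, reconstructed from those comments rather than copied from the peft source, shows why each dict access and flag becomes its own guard:

    import torch
    import torch.nn as nn

    # Minimal sketch of the guarded LoRA forward path (assumes a single
    # adapter named "default_0"; not the verbatim peft implementation).
    class LoraLinearSketch(nn.Module):
        def __init__(self, base_layer: nn.Linear, r: int = 16, alpha: int = 16):
            super().__init__()
            self.base_layer = base_layer                      # pinned by ID_MATCH
            self.lora_A = nn.ModuleDict(
                {"default_0": nn.Linear(base_layer.in_features, r, bias=False)})
            self.lora_B = nn.ModuleDict(
                {"default_0": nn.Linear(r, base_layer.out_features, bias=False)})
            self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})
            self.scaling = {"default_0": alpha / r}           # TYPE_MATCH + DICT_LENGTH + EQUALS_MATCH
            self.use_dora = {"default_0": False}              # ID_MATCH on the False singleton
            self.merged_adapters: list[str] = []              # LENGTH_CHECK: empty while unmerged
            self.active_adapters = ["default_0"]

        def forward(self, x: torch.Tensor) -> torch.Tensor:
            result = self.base_layer(x)                       # layer.py:557
            for active_adapter in self.active_adapters:
                if active_adapter not in self.lora_A.keys():  # layer.py:560
                    continue
                lora_A = self.lora_A[active_adapter]          # layer.py:562
                lora_B = self.lora_B[active_adapter]          # layer.py:563
                dropout = self.lora_dropout[active_adapter]   # layer.py:564
                scaling = self.scaling[active_adapter]        # layer.py:565
                x = x.to(lora_A.weight.dtype)                 # layer.py:566
                if not self.use_dora[active_adapter]:         # layer.py:568
                    result = result + lora_B(lora_A(dropout(x))) * scaling
            return result

The EQUALS_MATCH on `scaling['default_0'] == 1.0` pins alpha/r to 1, and the guards on `merged_adapters` and `_disable_adapters` pin the unmerged, adapters-enabled state. If the adapter is fixed at inference time, merging it into the base weights first (for example peft's `merge_and_unload()` or diffusers' `fuse_lora()`, depending on the stack in use) removes this whole per-layer guard family.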
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn, 140581773426064) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_k, 140526788903456) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_k.lora_A, 140526788903792) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_k.lora_A['default_0'], 140526788906336) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_k.lora_A['default_0'].weight, 140537312134464) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_k.lora_B, 140526788913920) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_k.lora_B['default_0'], 140526788906288) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_k.base_layer, 140581773426208) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_k.lora_dropout, 140526788912096) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_k.lora_dropout['default_0'], 140526788905808) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[1].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
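The `to_k` subtree that just closed exercises the full guard vocabulary this dump keeps repeating: ID_MATCH compares CPython object identity (`___check_obj_id`), TYPE_MATCH compares the exact type (`___check_type_id`), EQUALS_MATCH compares values, DICT_LENGTH and LENGTH_CHECK pin container sizes, and DICT_CONTAINS asserts the instance dict does not override `forward`. The `[0/3]` tag on every record marks this as the fourth compiled variant of frame 0. Rough Python equivalents, for illustration only (the real checks run as compiled accessors inside the TREE_GUARD_MANAGER):

    # `transformer` is an assumed handle to the compiled FluxTransformer2DModel;
    # the id value is session-specific, copied from the dump above.
    mod = transformer.transformer_blocks[1].attn.to_k

    guards = [
        lambda: id(mod) == 140526788903456,        # ID_MATCH: same object, not an equal copy
        lambda: type(mod.scaling) is dict,         # TYPE_MATCH via ___check_type_id
        lambda: len(mod.scaling) == 1,             # DICT_LENGTH
        lambda: mod.scaling["default_0"] == 1.0,   # EQUALS_MATCH
        lambda: not mod.merged_adapters,           # LENGTH_CHECK on the empty container
        lambda: "forward" not in mod.__dict__,     # DICT_CONTAINS: no instance-level override
    ]
    needs_recompile = not all(g() for g in guards)

Because even the `training` flags are ID_MATCHed, flipping the model between `train()` and `eval()`, or re-creating any LoRA submodule, fails these checks and triggers yet another recompile.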
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_q, 140526788911856) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_q.lora_A, 140526788911568) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_q.lora_A['default_0'], 140526788907056) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_q.lora_A['default_0'].weight, 140537312134624) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_q.lora_B, 140526788913824) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_q.lora_B['default_0'], 140526788907104) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_q.base_layer, 140581773426304) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_q.lora_dropout, 140526788908400) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_q.lora_dropout['default_0'], 140526788907488) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[1].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
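As with `to_k`, the `to_q` subtree ends in a doubled TENSOR_ALIASING entry. Despite the name, for `_active_adapter` this is an identity check on a plain Python attribute: every tuned layer must still reference the very same object as `transformer_blocks[0].norm1.linear._active_adapter`, which PEFT shares across layers. A hypothetical, self-contained illustration of what the check reduces to:

    # Illustration only: the guard asserts identity (`is`), not equality.
    shared = ["default_0"]                 # stand-in for the shared _active_adapter object
    layer_a = {"_active_adapter": shared}
    layer_b = {"_active_adapter": shared}
    assert layer_a["_active_adapter"] is layer_b["_active_adapter"]       # guard passes

    layer_b["_active_adapter"] = list(shared)                             # equal but distinct object
    assert layer_a["_active_adapter"] == layer_b["_active_adapter"]       # equality still holds
    assert layer_a["_active_adapter"] is not layer_b["_active_adapter"]   # guard would fail -> recompile

Rebinding the attribute anywhere in the model to an equal-but-distinct object therefore invalidates the cache entry even though nothing observable about the computation changed.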
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_v, 140526788908256) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_v.lora_A, 140526788904032) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_v.lora_A['default_0'], 140526788907152) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_v.lora_A['default_0'].weight, 140537319401936) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_v.lora_B, 140526788906480) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_v.lora_B['default_0'], 140526788904656) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_v.base_layer, 140581773426400) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_v.lora_dropout, 140526788905904) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_v.lora_dropout['default_0'], 140526788905616) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[1].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
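The `to_v` subtree mirrors `to_q` and `to_k` exactly; only the object ids differ. The three ID_MATCH comments anchor to the projection calls in the attention processor (diffusers/src/diffusers/models/attention_processor.py:1716-1718), schematically:

    import torch
    import torch.nn as nn

    # Schematic of the three guarded projection calls; plain Linears stand in
    # for the LoRA-wrapped projections of the real model, each of which drags
    # in the full adapter guard subtree shown above.
    class QKVProjSketch(nn.Module):
        def __init__(self, dim: int):
            super().__init__()
            self.to_q = nn.Linear(dim, dim)
            self.to_k = nn.Linear(dim, dim)
            self.to_v = nn.Linear(dim, dim)

        def forward(self, hidden_states: torch.Tensor):
            query = self.to_q(hidden_states)  # attention_processor.py:1716
            key = self.to_k(hidden_states)    # attention_processor.py:1717
            value = self.to_v(hidden_states)  # attention_processor.py:1718
            return query, key, value

Dumps like this one can be reproduced by enabling the guard artifacts before compiling, for example with `TORCH_LOGS="guards,recompiles"` in the environment or, equivalently (assuming torch >= 2.1), `torch._logging.set_logs(guards=True, recompiles=True)` before the first compiled call.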
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.norm_k.weight, 140581785356144) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.norm_q, 140581773426160) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[1].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.norm_q.weight, 140581765888128) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out, 
accessed_by=DictGetItemGuardAccessor(to_out) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out, 140581773426592) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[0], 140526788907248) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].attn.to_out[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[0].training, 140591004393408) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[0].lora_A, 
140526788905136) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[0].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[0].lora_A['default_0'], 140526788904464) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[0].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[0].lora_A['default_0'].weight, 140537314121408) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | 
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[0].lora_B, 140526788910368) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[0].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[0].lora_B['default_0'], 140526788906240) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[0].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[0].base_layer, 140581773426640) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[0].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[0].lora_dropout, 140526788909792) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[0].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[0].lora_dropout['default_0'], 140526788907968) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[0].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: 
___check_type_id(L['self'].transformer_blocks[1].attn.to_out[0].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].attn.to_out[0].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].attn.to_out[0].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.to_out[0].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].attn.to_out[0].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[0].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.to_out[0].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[1].attn.to_out[0].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[0]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[0]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[1], accessed_by=GetItemGuardAccessor(1) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[1], 140581767528512) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # 
diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_k_proj, 140526788917280) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].attn.add_k_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_k_proj.training, 140591004393408) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_k_proj.lora_A, 140526788917184) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_k_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | 
+- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_k_proj.lora_A['default_0'], 140526788914640) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_k_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_k_proj.lora_A['default_0'].weight, 140537314131728) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_k_proj.lora_B, 140526788904512) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_k_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_k_proj.lora_B['default_0'], 140526788917904) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_k_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_k_proj.base_layer, 140581773426448) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_k_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_k_proj.lora_dropout, 140526788907872) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.lora_dropout.__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_k_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_k_proj.lora_dropout['default_0'], 140526788909168) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_k_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.add_k_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].attn.add_k_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].attn.add_k_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | 
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.add_k_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].attn.add_k_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_k_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.add_k_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[1].attn.add_k_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_k_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] 
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_k_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_q_proj, 140526788914304) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].attn.add_q_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_q_proj.training, 140591004393408) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[1].attn.add_q_proj.lora_A, 140526788914064) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_q_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_q_proj.lora_A['default_0'], 140526788911184) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_q_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_q_proj.lora_A['default_0'].weight, 140537314116768) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_q_proj.lora_B, 140526788915840) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_q_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_q_proj.lora_B['default_0'], 140526788912000) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_q_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_q_proj.base_layer, 140581773426544) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) 
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_q_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_q_proj.lora_dropout, 140526788915216) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_q_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_q_proj.lora_dropout['default_0'], 140526788912816) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_q_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: 
___check_type_id(L['self'].transformer_blocks[1].attn.add_q_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].attn.add_q_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].attn.add_q_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.add_q_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].attn.add_q_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_q_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.add_q_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | 
| | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[1].attn.add_q_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_q_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_q_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_v_proj, 140526788912048) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].attn.add_v_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[1].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_v_proj.training, 140591004393408) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_v_proj.lora_A, 140526788917664) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_v_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_v_proj.lora_A['default_0'], 140526788916032) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_v_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_v_proj.lora_A['default_0'].weight, 140537314129808) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_v_proj.lora_B, 140526788914496) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_v_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_v_proj.lora_B['default_0'], 140526788916128) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_v_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # 
peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_v_proj.base_layer, 140581773426496) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_v_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_v_proj.lora_dropout, 140526788917856) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_v_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_v_proj.lora_dropout['default_0'], 140526788915792) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[1].attn.add_v_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_v_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.add_v_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].attn.add_v_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].attn.add_v_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.add_v_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].attn.add_v_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_v_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.add_v_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[1].attn.add_v_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.add_v_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.add_v_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out, 
accessed_by=DictGetItemGuardAccessor(to_add_out) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_add_out, 140526788992816) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].attn.to_add_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_add_out.training, 140591004393408) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_add_out.lora_A, 140533121019296) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_add_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[1].attn.to_add_out.lora_A['default_0'], 140533121015312) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_add_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_add_out.lora_A['default_0'].weight, 140537315126592) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_add_out.lora_B, 140533121010128) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_add_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_add_out.lora_B['default_0'], 140533121020208) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_add_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_add_out.base_layer, 140581767528560) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_add_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_add_out.lora_dropout, 140533121011520) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.lora_dropout.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_add_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_add_out.lora_dropout['default_0'], 140533121021408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_add_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.to_add_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].attn.to_add_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].attn.to_add_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | 
| +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.to_add_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].attn.to_add_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_add_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.to_add_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[1].attn.to_add_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.to_add_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.to_add_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.norm_added_k, 140581767528704) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[1].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.norm_added_k.weight, 140581785356064) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.norm_added_q, 140581767528608) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: 
L['self'].transformer_blocks[1].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.norm_added_q.weight, 140581774377824) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].attn.processor, 140581773426016) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[1].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1, accessed_by=DictGetItemGuardAccessor(norm1) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1, 140581773425584) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.norm, 140581773425728) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.silu, 140581773425632) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.linear, 140526788918000) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].norm1.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.linear.lora_A, 140526788908160) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.linear.lora_A['default_0'], 140526788915696) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.linear.lora_A['default_0'].weight, 140537312142064) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.linear.lora_B, 140526788907920) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.linear.lora_B['default_0'], 140526788915744) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.linear.base_layer, 140581773425680) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.linear.lora_dropout, 140526788913584) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.linear.lora_dropout['default_0'], 140526788915648) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.lora_dropout['default_0'].__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].norm1.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].norm1.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].norm1.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].norm1.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].norm1.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[1].norm1.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].norm1.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[1].norm1.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | 
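
The norm1.linear subtree is a peft LoRA wrapper around the base nn.Linear, and each guard corresponds to one attribute read in lora.Linear.forward (peft/tuners/lora/layer.py:557-568, quoted in the comments): the adapter dicts and their 'default_0' entries are ID_MATCHed, scaling and use_dora are value-guarded, merged_adapters must stay empty, and _disable_adapters must stay False. The paired TENSOR_ALIASING entries record that every wrapped layer shares one _active_adapter object, checked by identity against transformer_blocks[0]. A condensed sketch of the guarded control flow, using only the quoted lines; the final accumulation is the standard LoRA update and is an assumption, since the log does not quote it:

    def lora_linear_forward_sketch(self, x, *args, **kwargs):
        # Condensed from the source lines quoted by the guards; dtype
        # restoration and the DoRA branch are omitted.
        result = self.base_layer(x, *args, **kwargs)         # layer.py:557
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():     # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]             # layer.py:562
            lora_B = self.lora_B[active_adapter]             # layer.py:563
            dropout = self.lora_dropout[active_adapter]      # layer.py:564
            scaling = self.scaling[active_adapter]           # layer.py:565
            x = x.to(lora_A.weight.dtype)                    # layer.py:566
            if not self.use_dora[active_adapter]:            # layer.py:568
                # Assumed standard LoRA accumulation (not quoted in the log):
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result
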
+- GuardManager: source=L['self'].transformer_blocks[1].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm2, accessed_by=DictGetItemGuardAccessor(norm2) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm2, 140581767528752) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm2.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context, 140581767529088) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # 
diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net, 140581767529232) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[1].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0], 140581767529184) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 
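
ff_context is guarded as a three-stage pipeline: TYPE_MATCH plus LENGTH_CHECK pin self.net as a container of exactly three modules, and the per-index guards that follow walk net[0] through net[2], all anchored on the loop at diffusers attention.py:1200. A sketch of that loop; identifying net[1] as a Dropout is an inference from it carrying only a training-flag guard:

    def feed_forward_sketch(net, hidden_states):
        # net[0]: GELU projection, net[1]: Dropout (inferred), net[2]: LoRA-wrapped Linear
        for module in net:                         # attention.py:1200
            hidden_states = module(hidden_states)
        return hidden_states
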
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].proj, 140533121597840) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].ff_context.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_A, 140533121611520) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_A['default_0'], 140533121599712) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_A['default_0'].weight, 140537313929760) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_B, 140533121597600) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_B['default_0'], 140533121601440) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].proj.base_layer, 140581767529280) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_dropout, 140533121612720) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_dropout['default_0'], 140533121613584) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | 
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].ff_context.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].ff_context.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].ff_context.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].ff_context.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].ff_context.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: 
___check_type_id(L['self'].transformer_blocks[1].ff_context.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[1].ff_context.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- 
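
net[0].proj is another LoRA-wrapped linear carrying the same guard pattern as norm1.linear, and the EQUALS_MATCH on net[0].approximate == 'tanh' pins the activation variant used at activations.py:83. A sketch of the wrapper the two quoted lines come from; the constructor arguments and bias are assumptions:

    import torch.nn as nn
    import torch.nn.functional as F

    class GELUSketch(nn.Module):
        # Projection-then-GELU, per activations.py:83/88; dims and bias assumed.
        def __init__(self, dim_in: int, dim_out: int, approximate: str = "tanh"):
            super().__init__()
            self.proj = nn.Linear(dim_in, dim_out)
            self.approximate = approximate

        def gelu(self, gate):
            return F.gelu(gate, approximate=self.approximate)  # activations.py:83

        def forward(self, hidden_states):
            hidden_states = self.proj(hidden_states)           # activations.py:88
            return self.gelu(hidden_states)
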
GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[1], 140581767529376) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[2], 140533121600000) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].ff_context.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[2].training, 140591004393408) # for 
module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[2].lora_A, 140533121657360) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[2].lora_A['default_0'], 140533121655200) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[2].lora_A['default_0'].weight, 140537313931120) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[2].lora_B, 140533121654960) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[2].lora_B['default_0'], 140533121655152) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[2].base_layer, 140581767529424) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[2].lora_dropout, 140533121607536) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[2].lora_dropout['default_0'], 140533121603600) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].ff_context.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].ff_context.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].ff_context.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].ff_context.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].ff_context.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].ff_context.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[1].ff_context.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].ff_context.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
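Note: every LoRA-wrapped Linear in this tree (ff_context.net[2] above, norm1_context.linear and ff.net[0].proj below) installs this same cluster of guards, one per attribute its forward touches. A minimal sketch of that forward path, reconstructed only from the statements the guard comments quote (peft/tuners/lora/layer.py:557-568); the class scaffolding and the residual update on the last line are assumptions, not the actual peft source:

```python
import torch
import torch.nn as nn

class LoraLinearSketch(nn.Module):
    """Hypothetical stand-in for peft's LoRA Linear, built from the quoted lines."""

    def __init__(self, base_layer: nn.Linear, r: int = 16):
        super().__init__()
        self.base_layer = base_layer
        self.lora_A = nn.ModuleDict({"default_0": nn.Linear(base_layer.in_features, r, bias=False)})
        self.lora_B = nn.ModuleDict({"default_0": nn.Linear(r, base_layer.out_features, bias=False)})
        self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})
        self.scaling = {"default_0": 1.0}      # EQUALS_MATCH pins this to 1.0
        self.use_dora = {"default_0": False}   # ID_MATCH pins this to False
        self.active_adapters = ["default_0"]

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        result = self.base_layer(x)                       # layer.py:557
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():  # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]          # layer.py:562
            lora_B = self.lora_B[active_adapter]          # layer.py:563
            dropout = self.lora_dropout[active_adapter]   # layer.py:564
            scaling = self.scaling[active_adapter]        # layer.py:565
            x = x.to(lora_A.weight.dtype)                 # layer.py:566
            if not self.use_dora[active_adapter]:         # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result
```

Dynamo guards each name this code reads (lora_A, lora_B, scaling['default_0'], use_dora, merged_adapters, _active_adapter), which is why a single adapter multiplies the guard count so sharply.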
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context, 140581773425776) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.norm, 140581773425968) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.silu, 140581773425872) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.linear, 140526788915360) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[1].norm1_context.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
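The norm1_context guards walk an AdaLayerNormZero-style module. A sketch assembled from the statements quoted above (normalization.py:135-139) plus the five-value unpack quoted at transformer_flux.py:167; the six-way chunk and the dimensions are assumptions consistent with those quotes:

```python
import torch
import torch.nn as nn

class AdaLayerNormZeroSketch(nn.Module):
    def __init__(self, embedding_dim: int):
        super().__init__()
        self.emb = None  # guarded by ID_MATCH; apparently None in this run
        self.silu = nn.SiLU()
        self.linear = nn.Linear(embedding_dim, 6 * embedding_dim)
        self.norm = nn.LayerNorm(embedding_dim, elementwise_affine=False, eps=1e-6)

    def forward(self, x: torch.Tensor, emb: torch.Tensor):
        if self.emb is not None:                # normalization.py:135
            raise NotImplementedError("timestep-embedding path unused here")
        emb = self.linear(self.silu(emb))       # normalization.py:137
        shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = emb.chunk(6, dim=1)
        x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # normalization.py:139
        # matches the five-value unpack quoted at transformer_flux.py:167
        return x, gate_msa, shift_mlp, scale_mlp, gate_mlp
```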
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.linear.lora_A, 140526788915024) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.linear.lora_A['default_0'], 140526788909072) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.linear.lora_A['default_0'].weight, 140537312144864) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.linear.lora_B, 140526788915312) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.linear.lora_B['default_0'], 140526788910752) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.linear.base_layer, 140581773425920) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.linear.lora_dropout, 140526788918384) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.linear.lora_dropout['default_0'], 140526788910656) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
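For reference, the guard kinds recurring in this tree reduce to cheap predicates over the cached object graph. The snippet below gives simplified Python equivalents; the real implementations live in torch/_dynamo/guards.py and its C++ accessors, so treat these as semantics only:

```python
def id_match(obj, expected_id):           # ID_MATCH / ___check_obj_id
    return id(obj) == expected_id         # same object, incl. True/False/None singletons

def type_match(obj, expected_type_id):    # TYPE_MATCH / ___check_type_id
    return id(type(obj)) == expected_type_id  # exact type, no subclasses

def equals_match(value, expected):        # EQUALS_MATCH
    return value == expected              # e.g. scaling['default_0'] == 1.0

def dict_length(d, n):                    # DICT_LENGTH
    return len(d) == n                    # e.g. exactly one adapter registered

def length_check_empty(seq):              # LENGTH_CHECK of the form "not ..."
    return not seq                        # e.g. merged_adapters is empty

def aliasing(a, b):                       # TENSOR_ALIASING
    return a is b                         # all layers share one _active_adapter list

def dict_not_contains(key, d):            # DICT_CONTAINS with a leading "not"
    return key not in d                   # e.g. no per-instance 'forward' override
```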
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].norm1_context.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].norm1_context.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[1].norm1_context.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].norm1_context.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[1].norm1_context.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[1].norm1_context.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[1].norm1_context.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm1_context.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[1].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm2_context, 140581767528800) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[1].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[1]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
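That closes out transformer_blocks[1]: two blocks in, the tree has already repeated this per-attribute pattern for every LoRA target, and the [0/3] tag on these records indicates a recompilation of frame 0, so the whole set is re-validated on each call. One common mitigation, sketched under the assumption of a standard diffusers FLUX + LoRA workflow rather than anything this log itself prescribes (the adapter path is a placeholder): fuse the adapter into the base weights before compiling, so none of the peft bookkeeping above gets traced or guarded.

```python
import torch
from diffusers import FluxPipeline

pipe = FluxPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16
).to("cuda")
pipe.load_lora_weights("your/lora-adapter")  # placeholder repo id or local path
pipe.fuse_lora()  # folds scaling * (B @ A) into the base Linear weights
# With the adapter fused, the compiled transformer sees plain nn.Linear
# modules: no lora_A/lora_B/scaling/use_dora guards are installed at all.
pipe.transformer = torch.compile(pipe.transformer)
```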
| | | | | +- GuardManager: source=L['self'].transformer_blocks[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2], 140581773425248) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff, accessed_by=DictGetItemGuardAccessor(ff)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff, 140581767530720) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net, 140581767530960) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[2].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0], 140581767530912) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
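The ff subtree pins self.ff.net as a 3-element container (LENGTH_CHECK == 3) and iterates it with the loop quoted from attention.py:1200. Below is a stand-in consistent with the guards that follow: net[0] a tanh-GELU projection and net[2] the output projection that LoRA wraps in this run; treating net[1], which only receives a .training guard, as a Dropout is an assumption.

```python
import torch
import torch.nn as nn
import torch.nn.functional as F

class GELUProj(nn.Module):
    """Counterpart of the GELU block guarded below as ff.net[0]."""

    def __init__(self, dim_in: int, dim_out: int, approximate: str = "tanh"):
        super().__init__()
        self.proj = nn.Linear(dim_in, dim_out)
        self.approximate = approximate  # pinned below by EQUALS_MATCH == 'tanh'

    def gelu(self, gate: torch.Tensor) -> torch.Tensor:
        return F.gelu(gate, approximate=self.approximate)  # activations.py:83

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        hidden_states = self.proj(hidden_states)           # activations.py:88
        return self.gelu(hidden_states)

class FeedForwardSketch(nn.Module):
    def __init__(self, dim: int, mult: int = 4, dropout: float = 0.0):
        super().__init__()
        inner_dim = dim * mult
        self.net = nn.ModuleList([
            GELUProj(dim, inner_dim),   # net[0]
            nn.Dropout(dropout),        # net[1] (assumed; only .training is guarded)
            nn.Linear(inner_dim, dim),  # net[2], LoRA-wrapped in this run
        ])  # LENGTH_CHECK: len(self.net) == 3

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        for module in self.net:         # attention.py:1200
            hidden_states = module(hidden_states)
        return hidden_states
```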
accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].proj, 140533121401232) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].ff.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].proj.lora_A, 140533121403008) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[2].ff.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].proj.lora_A['default_0'], 140533120098336) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].proj.lora_A['default_0'].weight, 140531603221424) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].proj.lora_B, 140533121409200) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # 
peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].proj.lora_B['default_0'], 140533120106352) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].proj.base_layer, 140581767531008) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].proj.lora_dropout, 140533121410448) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].proj.lora_dropout['default_0'], 140533121408480) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].ff.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].ff.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- 
EQUALS_MATCH: L['self'].transformer_blocks[2].ff.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].ff.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].ff.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].ff.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[2].ff.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in 
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[1], 140581767531056) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
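
The EQUALS_MATCH on approximate == 'tanh' above pins the GELU variant used by ff.net[0]. A sketch of the diffusers GELU projection module those guards walk, built around the quoted line (activations.py:83); the constructor details are assumptions:

import torch.nn.functional as F
from torch import nn

class GELUSketch(nn.Module):
    # a Linear projection followed by tanh-approximated GELU, as in diffusers' GELU block;
    # self.proj is the layer that peft wraps with the LoRA guards above
    def __init__(self, dim_in: int, dim_out: int, approximate: str = "tanh"):
        super().__init__()
        self.proj = nn.Linear(dim_in, dim_out)
        self.approximate = approximate  # guarded by EQUALS_MATCH == 'tanh'

    def gelu(self, gate):
        return F.gelu(gate, approximate=self.approximate)  # activations.py:83

    def forward(self, hidden_states):
        return self.gelu(self.proj(hidden_states))
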
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[2], 140533120101168) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].ff.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[2].lora_A, 140533120101600) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[2].lora_A['default_0'], 140533120100688) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[2].lora_A['default_0'].weight, 140531603221504) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[2].lora_B, 140533120101504) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[2].lora_B['default_0'], 140533120106304) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[2].base_layer, 140581767531104) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[2].lora_dropout, 140533120100544) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[2].lora_dropout['default_0'], 140533120102896) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].ff.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].ff.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].ff.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].ff.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].ff.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].ff.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[2].ff.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
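
The three ff.net[i] subtrees above mirror the structure of the diffusers FeedForward block: net[0] is the guarded GELU projection, net[1] a Dropout (only its training flag is guarded), and net[2] the LoRA-wrapped output Linear. A sketch of the loop the "for module in self.net:" comments point at (attention.py:1200), reusing GELUSketch from the earlier sketch; the dimensions are placeholders:

from torch import nn

class FeedForwardSketch(nn.Module):
    def __init__(self, dim: int, inner_dim: int, dropout: float = 0.0):
        super().__init__()
        self.net = nn.ModuleList([
            GELUSketch(dim, inner_dim),   # net[0]
            nn.Dropout(dropout),          # net[1]
            nn.Linear(inner_dim, dim),    # net[2], LoRA-wrapped in the dump above
        ])

    def forward(self, hidden_states):
        for module in self.net:           # attention.py:1200
            hidden_states = module(hidden_states)
        return hidden_states
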
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn, 140581767529952) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
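
Dumps like this come from Dynamo's verbose guard logging; the [0/3] tag on these records marks the fourth compilation of frame 0, i.e. this guard tree was built after three earlier guard sets failed. A way to reproduce such a dump, assuming pipe is a Flux pipeline with a peft LoRA adapter loaded (the logging calls are the standard torch 2.x APIs):

import torch

# equivalent to TORCH_LOGS="guards,recompiles" in the environment:
# print each TREE_GUARD_MANAGER dump plus the reason for every recompile
torch._logging.set_logs(guards=True, recompiles=True)

pipe.transformer = torch.compile(pipe.transformer)
image = pipe("a prompt", num_inference_steps=4).images[0]
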
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_k, 140533121653424) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_k.lora_A, 140533121655680) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_k.lora_A['default_0'], 140533121651600) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_k.lora_A['default_0'].weight, 140542619522112) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_k.lora_B, 140533121652608) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_k.lora_B['default_0'], 140533121649296) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_k.base_layer, 140581767530096) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_k.lora_dropout, 140533121651696) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_k.lora_dropout['default_0'], 140533121653904) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[2].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
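
to_q, to_k, and to_v each carry an identical LoRA guard subtree because the attention processor projects hidden_states through all three once per call. The fragment the quoted lines (attention_processor.py:1716-1718) come from, reduced to a sketch; the helper name is illustrative:

def qkv_projections(attn, hidden_states):
    # the three projections covered by the to_q/to_k/to_v guard subtrees;
    # each is a peft LoRA layer like the LoraLinearSketch above
    query = attn.to_q(hidden_states)  # attention_processor.py:1716
    key = attn.to_k(hidden_states)    # attention_processor.py:1717
    value = attn.to_v(hidden_states)  # attention_processor.py:1718
    return query, key, value
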
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_q, 140533121647520) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_q.lora_A, 140533121653280) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_q.lora_A['default_0'], 140533121657456) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_q.lora_A['default_0'].weight, 140537316663728) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_q.lora_B, 140533121656592) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_q.lora_B['default_0'], 140533121655056) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_q.base_layer, 140581767530192) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_q.lora_dropout, 140533121650208) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_q.lora_dropout['default_0'], 140533121652848) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[2].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_v, 140533121649776) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_v.lora_A, 140533121648288) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_v.lora_A['default_0'], 140533121655488) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_v.lora_A['default_0'].weight, 140542619519392) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_v.lora_B, 140533121649824) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_v.lora_B['default_0'], 140533121649344) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_v.base_layer, 140581767530288) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_v.lora_dropout, 140533121650112) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_v.lora_dropout['default_0'], 140533121647808) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[2].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.norm_k, 140581767530144) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.norm_k.weight, 140581772708016) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.norm_q, 140581767530048) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.norm_q.weight, 140581773245904) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out, 140581767530480) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0], accessed_by=GetItemGuardAccessor(0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[0], 140533121416496) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].attn.to_out[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[0].training, 140591004393408) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[0].lora_A, 140533121410592) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[0].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[0].lora_A['default_0'], 140533121406080) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[0].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[0].lora_A['default_0'].weight, 140537312733168) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[0].lora_B, 140533121411888) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[0].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[0].lora_B['default_0'], 140533121415344) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[0].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[0].base_layer, 140581767530528) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[0].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[0].lora_dropout, 140533121412560) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[0].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[0].lora_dropout['default_0'], 140533121413808) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[0].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.to_out[0].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].attn.to_out[0].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].attn.to_out[0].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.to_out[0].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].attn.to_out[0].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[0].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.to_out[0].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[2].attn.to_out[0].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[0]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[0]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[1], accessed_by=GetItemGuardAccessor(1)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[1], 140581767530576) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_k_proj, 140533121090560) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].attn.add_k_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_k_proj.training, 140591004393408) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_k_proj.lora_A, 140533121091472) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_k_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_k_proj.lora_A['default_0'], 140533121090848) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_k_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_k_proj.lora_A['default_0'].weight, 140537312729808) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_k_proj.lora_B, 140533121094064) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_k_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_k_proj.lora_B['default_0'], 140533121090752) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_k_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_k_proj.base_layer, 140581767530336) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_k_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_k_proj.lora_dropout, 140533121093536) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_k_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_k_proj.lora_dropout['default_0'], 140533121094400) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_k_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.add_k_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].attn.add_k_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].attn.add_k_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | 
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.add_k_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].attn.add_k_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_k_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.add_k_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[2].attn.add_k_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_k_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] 
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_k_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_q_proj, 140533121414336) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].attn.add_q_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_q_proj.training, 140591004393408) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[2].attn.add_q_proj.lora_A, 140533121413136) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_q_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_q_proj.lora_A['default_0'], 140533121413184) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_q_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_q_proj.lora_A['default_0'].weight, 140537312738528) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_q_proj.lora_B, 140533121414768) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_q_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_q_proj.lora_B['default_0'], 140533121413280) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_q_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_q_proj.base_layer, 140581767530432) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) 
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_q_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_q_proj.lora_dropout, 140533121411984) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_q_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_q_proj.lora_dropout['default_0'], 140533121414720) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_q_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: 
___check_type_id(L['self'].transformer_blocks[2].attn.add_q_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].attn.add_q_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].attn.add_q_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.add_q_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].attn.add_q_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_q_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.add_q_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | 
| | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[2].attn.add_q_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_q_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_q_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_v_proj, 140533121090512) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].attn.add_v_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[2].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_v_proj.training, 140591004393408) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_v_proj.lora_A, 140533121097952) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_v_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_v_proj.lora_A['default_0'], 140533121414192) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_v_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_v_proj.lora_A['default_0'].weight, 140537312737648) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_v_proj.lora_B, 140533121105344) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_v_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_v_proj.lora_B['default_0'], 140533121414096) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_v_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # 
peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_v_proj.base_layer, 140581767530384) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_v_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_v_proj.lora_dropout, 140533121104576) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_v_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_v_proj.lora_dropout['default_0'], 140533121105824) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[2].attn.add_v_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_v_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.add_v_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].attn.add_v_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].attn.add_v_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.add_v_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].attn.add_v_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_v_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.add_v_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[2].attn.add_v_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.add_v_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.add_v_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out, 
accessed_by=DictGetItemGuardAccessor(to_add_out) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_add_out, 140533121411744) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].attn.to_add_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_add_out.training, 140591004393408) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_add_out.lora_A, 140533121405936) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_add_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[2].attn.to_add_out.lora_A['default_0'], 140533121406608) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_add_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_add_out.lora_A['default_0'].weight, 140531603215824) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_add_out.lora_B, 140533121413760) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_add_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_add_out.lora_B['default_0'], 140533121400944) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_add_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_add_out.base_layer, 140581767530624) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_add_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_add_out.lora_dropout, 140533121407664) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.lora_dropout.training, 
accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_add_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_add_out.lora_dropout['default_0'], 140533121414816) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_add_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.to_add_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].attn.to_add_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].attn.to_add_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.to_add_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].attn.to_add_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_add_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.to_add_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[2].attn.to_add_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.to_add_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.to_add_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
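The block above closes out the guards for transformer_blocks[2].attn.to_add_out, a LoRA-wrapped Linear: Dynamo pins the dropout module, the scaling dict (EQUALS_MATCH on 1.0), the use_dora flag, the empty merged_adapters list, and the shared _active_adapter object. Below is a minimal sketch of the control flow those guards specialize, reconstructed only from the source lines quoted in the guard comments (peft/tuners/lora/layer.py:557-568, peft/tuners/tuners_utils.py:506-516); the function signature and the final update expression are assumptions, not PEFT's verbatim code.

```python
# Hedged sketch of the guarded LoRA Linear forward path; names outside the
# quoted source lines (e.g. this function's signature) are assumptions.
def lora_linear_forward(self, x):
    result = self.base_layer(x)                       # layer.py:557
    for active_adapter in self.active_adapters:       # only 'default_0' exists here
        if active_adapter not in self.lora_A.keys():  # layer.py:560
            continue
        lora_A = self.lora_A[active_adapter]          # layer.py:562
        lora_B = self.lora_B[active_adapter]          # layer.py:563
        dropout = self.lora_dropout[active_adapter]   # layer.py:564
        scaling = self.scaling[active_adapter]        # layer.py:565 -- guarded == 1.0
        x = x.to(lora_A.weight.dtype)                 # layer.py:566
        if not self.use_dora[active_adapter]:         # layer.py:568 -- guarded False
            result = result + lora_B(lora_A(dropout(x))) * scaling
    return result
```

Because every dict lookup, float, and bool on this path becomes its own guard, each LoRA-wrapped Linear contributes a near-identical block like the one above, which is why the tree is so large.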
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.norm_added_k, 140581767530768) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.norm_added_k.weight, 140581766000416) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.norm_added_q, 140581767530672) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.norm_added_q.weight, 140581766000496) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
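The eps == 1e-06 EQUALS_MATCH and weight ID_MATCH entries above belong to the RMSNorm modules norm_added_k/norm_added_q. A short sketch of that normalization, built around the two lines the guards quote (diffusers/src/diffusers/models/normalization.py:428 and :430); computing the variance over the last dimension is an assumption based on standard RMSNorm:

```python
import torch

def rms_norm_sketch(hidden_states, weight, eps=1e-06):
    # Variance over the last dim is assumed; only lines 428/430 are quoted above.
    variance = hidden_states.pow(2).mean(-1, keepdim=True)
    hidden_states = hidden_states * torch.rsqrt(variance + eps)  # normalization.py:428
    if weight is not None:  # normalization.py:430 -- hence the weight ID_MATCH above
        hidden_states = hidden_states * weight
    return hidden_states
```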
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].attn.processor, 140581767529904) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
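attn.heads is guarded with a plain EQUALS_MATCH on 24 because the integer flows into shape arithmetic inside the traced processor (the quoted attention_processor.py:1721); a different head count would invalidate the compiled graph. A runnable illustration of that arithmetic; the tensor sizes and the view/transpose pattern are assumptions chosen for the example, only the head_dim line is quoted above:

```python
import torch

heads = 24                              # guarded: EQUALS_MATCH attn.heads == 24
query = torch.randn(1, 512, 3072)       # batch/seq/inner_dim chosen for illustration
batch_size, seq_len, inner_dim = query.shape
head_dim = inner_dim // heads           # attention_processor.py:1721 -> 128
query = query.view(batch_size, -1, heads, head_dim).transpose(1, 2)
print(query.shape)                      # torch.Size([1, 24, 512, 128])
```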
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1, accessed_by=DictGetItemGuardAccessor(norm1)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1, 140581767529472) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.norm, 140581767529616) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.silu, 140581767529520) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.linear, 140533121656880) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].norm1.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
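The norm1 guards above walk through an AdaLayerNormZero-style module: SiLU then a (LoRA-wrapped) Linear on the conditioning embedding, whose output modulates a LayerNorm. The sketch below is assembled from the quoted lines (normalization.py:135, :137, :139) and the five values unpacked at transformer_flux.py:165; the chunk(6, ...) step and the LayerNorm settings are assumptions:

```python
import torch.nn as nn

class AdaLayerNormZeroSketch(nn.Module):
    # Hedged reconstruction of the module guarded under norm1.
    def __init__(self, embedding_dim: int):
        super().__init__()
        self.silu = nn.SiLU()
        self.linear = nn.Linear(embedding_dim, 6 * embedding_dim)  # LoRA-wrapped in this log
        self.norm = nn.LayerNorm(embedding_dim, elementwise_affine=False)

    def forward(self, x, emb):
        emb = self.linear(self.silu(emb))  # normalization.py:137
        shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = emb.chunk(6, dim=1)
        x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # normalization.py:139
        return x, gate_msa, shift_mlp, scale_mlp, gate_mlp  # unpacked at transformer_flux.py:165
```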
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.linear.lora_A, 140533121656352) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.linear.lora_A['default_0'], 140533121656928) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.linear.lora_A['default_0'].weight, 140537321667568) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.linear.lora_B, 140533121657600) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.linear.lora_B['default_0'], 140533121657504) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.linear.base_layer, 140581767529568) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.linear.lora_dropout, 140533121657552) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.linear.lora_dropout['default_0'], 140533121657264) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].norm1.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].norm1.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].norm1.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].norm1.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].norm1.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].norm1.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[2].norm1.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
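The TENSOR_ALIASING entry above (the raw log prints it twice per module) is an identity guard, not a value guard: the traced code read _active_adapter through the active_adapter property (tuners_utils.py:516), and every LoRA layer in this model holds the very same object, so Dynamo asserts `a is b` against the first occurrence in transformer_blocks[0].norm1.linear. A toy illustration of the relation being checked; the class and names here are invented for the example:

```python
# The guard asserts shared identity ("is"), not equal contents.
shared = ["default_0"]

class Layer:
    def __init__(self, active_adapter):
        self._active_adapter = active_adapter

a = Layer(shared)
b = Layer(shared)
assert a._active_adapter is b._active_adapter  # passes: same object
c = Layer(["default_0"])
assert c._active_adapter == shared             # equal contents...
assert c._active_adapter is not shared         # ...but a fresh list would fail the guard
```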
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm2, accessed_by=DictGetItemGuardAccessor(norm2)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm2, 140581767530816) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm2.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context, 140581767531152) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net, 140581767531296) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[2].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
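ff_context.net is guarded as a 3-element ModuleList iterated at attention.py:1200, and the guards that follow descend into net[0], a tanh-GELU projection whose inner Linear is LoRA-wrapped (the approximate == 'tanh' EQUALS_MATCH appears further below). A hedged sketch of that feed-forward stack, using the quoted lines (attention.py:1200, activations.py:83 and :88); the Dropout in the middle slot and the dimensions are assumptions:

```python
import torch.nn as nn
import torch.nn.functional as F

class GELUProjSketch(nn.Module):
    # Sketch of net[0]: Linear projection (LoRA-wrapped in the log) + tanh GELU.
    def __init__(self, dim_in, dim_out, approximate="tanh"):
        super().__init__()
        self.proj = nn.Linear(dim_in, dim_out)
        self.approximate = approximate              # guarded: EQUALS_MATCH == 'tanh'

    def forward(self, hidden_states):
        hidden_states = self.proj(hidden_states)    # activations.py:88
        return F.gelu(hidden_states, approximate=self.approximate)  # cf. activations.py:83

class FeedForwardSketch(nn.Module):
    def __init__(self, dim, inner_dim):
        super().__init__()
        # len(self.net) == 3 is guarded; Dropout as the middle module is an assumption.
        self.net = nn.ModuleList(
            [GELUProjSketch(dim, inner_dim), nn.Dropout(0.0), nn.Linear(inner_dim, dim)]
        )

    def forward(self, hidden_states):
        for module in self.net:                     # attention.py:1200 -- the loop the guards unroll
            hidden_states = module(hidden_states)
        return hidden_states
```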
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].proj, 140533120099680) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].ff_context.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_A, 140533120100064) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_A['default_0'], 140533120095168) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_A['default_0'].weight, 140531603222224) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_B, 140533120098096) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_B['default_0'], 140533120098048) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].proj.base_layer, 140581767531344) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_dropout, 140533120100304) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_dropout['default_0'], 140533120099392) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].ff_context.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].ff_context.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].ff_context.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].ff_context.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].ff_context.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].ff_context.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[2].ff_context.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
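Note: every guard in this subtree specializes the PEFT LoRA forward that the inline comments cite (peft/tuners/lora/layer.py:557-568). Below is a minimal Python sketch of that code path, reconstructed only from the source lines quoted in the guard comments; the loop over active_adapters and the final combination step are assumptions about the surrounding code, not a verbatim copy of PEFT.

def lora_linear_forward(self, x, *args, **kwargs):
    # layer.py:557 -- the frozen base projection runs first
    result = self.base_layer(x, *args, **kwargs)
    for active_adapter in self.active_adapters:        # assumed iteration
        if active_adapter not in self.lora_A.keys():   # layer.py:560
            continue
        lora_A = self.lora_A[active_adapter]           # layer.py:562
        lora_B = self.lora_B[active_adapter]           # layer.py:563
        dropout = self.lora_dropout[active_adapter]    # layer.py:564
        scaling = self.scaling[active_adapter]         # layer.py:565
        x = x.to(lora_A.weight.dtype)                  # layer.py:566
        if not self.use_dora[active_adapter]:          # layer.py:568
            # assumed combination: low-rank update scaled into the result
            result = result + lora_B(lora_A(dropout(x))) * scaling
    return result

Each dict lookup in this path is why Dynamo installs one ID_MATCH/TYPE_MATCH/DICT_LENGTH/EQUALS_MATCH cluster per wrapped layer: lora_A, lora_B, lora_dropout, scaling and use_dora are all read on every call, so each gets its own guard subtree.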
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[1], 140581767531440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[2], 140533120098144) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].ff_context.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[2].lora_A, 140533120097184) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[2].lora_A['default_0'], 140533120096320) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[2].lora_A['default_0'].weight, 140531603215344) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[2].lora_B, 140533120097328) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[2].lora_B['default_0'], 140533120097808) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[2].base_layer, 140581767531488) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[2].lora_dropout, 140533120090800) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[2].lora_dropout['default_0'], 140533120097664) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].ff_context.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].ff_context.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].ff_context.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].ff_context.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].ff_context.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].ff_context.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[2].ff_context.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].ff_context.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
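Note: the TENSOR_ALIASING guards assert that _active_adapter on every wrapped layer is literally the same Python object as transformer_blocks[0].norm1.linear._active_adapter, so a single identity check per pair replaces per-layer value comparisons. A toy illustration of the invariant being checked; the list value and variable names are assumptions about how PEFT stores the active adapter, not PEFT code:

shared = ["default_0"]           # assumed _active_adapter value
layer_a_active_adapter = shared  # hypothetical stand-ins for two LoRA layers
layer_b_active_adapter = shared
assert layer_a_active_adapter is layer_b_active_adapter  # the aliasing invariant

Because the object is shared, one adapter switch (e.g. a set_adapter call) is visible to every layer at once, and any rebinding of that shared object invalidates all of these guards together.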
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context, 140581767529664) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.norm, 140581767529856) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.silu, 140581767529760) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.linear, 140533121661536) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[2].norm1_context.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.linear.lora_A, 140533121651504) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.linear.lora_A['default_0'], 140533121658656) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.linear.lora_A['default_0'].weight, 140537321656768) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.linear.lora_B, 140533121659712) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.linear.lora_B['default_0'], 140533121659520) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.linear.base_layer, 140581767529808) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.linear.lora_dropout, 140533121656544) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.linear.lora_dropout['default_0'], 140533121651792) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].norm1_context.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].norm1_context.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[2].norm1_context.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].norm1_context.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[2].norm1_context.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
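Note: the EQUALS_MATCH guards on scaling['default_0'] == 1.0 pin the LoRA scale as a Python float constant in the compiled graph, so changing the adapter scale after compilation invalidates the guard and forces a recompile. A minimal standalone repro of that mechanism (a generic torch.compile sketch, not the PEFT code itself; module and attribute names are made up):

import torch

class Scaled(torch.nn.Module):
    def __init__(self):
        super().__init__()
        self.scaling = {"default_0": 1.0}  # mirrors the guarded dict shape

    def forward(self, x):
        # the float is burned into the graph; Dynamo guards its value
        return x * self.scaling["default_0"]

m = torch.compile(Scaled())
x = torch.randn(4)
m(x)                          # compiles; installs EQUALS_MATCH ... == 1.0
m.scaling["default_0"] = 0.5  # guard now fails
m(x)                          # recompiles with the new constant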
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[2].norm1_context.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[2].norm1_context.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm1_context.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[2].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm2_context, 140581767530864) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[2].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
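Note: the two object ids that recur in every ID_MATCH on .training (140591004393440 and 140591004393408) are consistent with the two bool singletons, so these guards pin each submodule's train/eval state; the DICT_CONTAINS guards likewise pin that no instance-level forward has been monkey-patched onto a module (the self.forward lookup at nn/modules/module.py:1556). Flipping either after compilation forces a recompile. A generic sketch of the training-flag case (not the FLUX pipeline):

import torch

class Gate(torch.nn.Module):
    def forward(self, x):
        # Dynamo guards ___check_obj_id(self.training, ...) for this branch
        return x * 0.5 if self.training else x

g = torch.compile(Gate())
x = torch.randn(4)
g.eval(); g(x)   # first compile, guarded on training being False
g.train(); g(x)  # ID_MATCH on .training fails -> a new compile id (cf. the [0/3] tag)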
GuardManager: source=L['self'].transformer_blocks[3], accessed_by=GetItemGuardAccessor(3) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3], 140581767529136) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff, accessed_by=DictGetItemGuardAccessor(ff) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff, 140581767532784) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net, accessed_by=DictGetItemGuardAccessor(net)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net, 140581767533024) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[3].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0], accessed_by=GetItemGuardAccessor(0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0], 140581767532976) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
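The TYPE_MATCH / LENGTH_CHECK / GetItemGuardAccessor trio on ff.net comes from Dynamo unrolling the `for module in self.net:` loop in diffusers' FeedForward: the compiled graph is only valid while the ModuleList keeps its type, its length (3), and the identity of each element. A minimal sketch of the pattern (not the actual diffusers class):

    import torch.nn as nn

    class TinyFeedForward(nn.Module):
        def __init__(self):
            super().__init__()
            # net[0]=projection/activation, net[1]=dropout, net[2]=linear, mirroring the log
            self.net = nn.ModuleList([nn.Linear(8, 32), nn.Dropout(0.0), nn.Linear(32, 8)])

        def forward(self, x):
            for module in self.net:  # unrolled -> TYPE_MATCH, LENGTH_CHECK, per-index ID_MATCH
                x = module(x)
            return x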
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].proj, 140533120448304) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].ff.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].proj.lora_A, 140533119899904) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].proj.lora_A['default_0'], 140533119894000) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].proj.lora_A['default_0'].weight, 140531602536416) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].proj.lora_B, 140533119909216) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].proj.lora_B['default_0'], 140533119906144) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].proj.base_layer, 140581767533072) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].proj.lora_dropout, 140533119907248) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
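All of the lora_A / lora_B / base_layer / lora_dropout guards in this block trace the same few lines of peft's LoRA Linear.forward. Paraphrased from the guarded source locations (peft/tuners/lora/layer.py:557-568, simplified to the non-DoRA branch):

    # result = base(x) + lora_B(lora_A(dropout(x))) * scaling, per active adapter
    result = self.base_layer(x, *args, **kwargs)
    for active_adapter in self.active_adapters:
        if active_adapter not in self.lora_A.keys():
            continue
        lora_A = self.lora_A[active_adapter]
        lora_B = self.lora_B[active_adapter]
        dropout = self.lora_dropout[active_adapter]
        scaling = self.scaling[active_adapter]
        x = x.to(lora_A.weight.dtype)
        if not self.use_dora[active_adapter]:
            result = result + lora_B(lora_A(dropout(x))) * scaling

Each attribute read in that loop becomes one GuardManager node per LoRA-wrapped layer, which is why the same subtree repeats for every projection in every transformer block.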
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].proj.lora_dropout['default_0'], 140533119893808) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].ff.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].ff.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].ff.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].ff.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].ff.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].ff.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[3].ff.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
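scaling['default_0'] is a plain Python float, so Dynamo specializes on its value (the EQUALS_MATCH ... == 1.0 above) rather than treating it symbolically; changing the scale later invalidates the compiled graph. In peft the value is lora_alpha / r, so 1.0 simply means the two were equal in this adapter's config (the numbers below are illustrative, not taken from the log):

    r, lora_alpha = 16, 16        # hypothetical adapter hyperparameters
    scaling = lora_alpha / r      # 1.0 -> baked into the graph, guarded by EQUALS_MATCH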
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[1], accessed_by=GetItemGuardAccessor(1)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[1], 140581767533120) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
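The EQUALS_MATCH on .approximate == 'tanh' pins which GELU variant diffusers' GELU block uses (activations.py:83 in the log). The guarded call is just:

    import torch
    import torch.nn.functional as F

    gate = torch.randn(4)
    out = F.gelu(gate, approximate='tanh')  # tanh approximation rather than exact erf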
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2], accessed_by=GetItemGuardAccessor(2)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[2], 140533119901200) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].ff.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[2].lora_A, 140533119895392) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[2].lora_A['default_0'], 140533119894720) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[2].lora_A['default_0'].weight, 140531602522416) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[2].lora_B, 140533119899328) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
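Note that the adapter weights themselves are guarded by identity (the ID_MATCH on ...lora_A['default_0'].weight), not by value: in-place updates keep the same Parameter object and leave the guard intact, while rebinding the attribute does not. A self-contained illustration:

    import torch
    import torch.nn as nn

    lin = nn.Linear(4, 4)
    old = lin.weight
    with torch.no_grad():
        lin.weight.copy_(torch.zeros(4, 4))       # same object -> an ID_MATCH guard still passes
    assert lin.weight is old
    lin.weight = nn.Parameter(torch.zeros(4, 4))  # new object -> the guard would fail, forcing a recompile
    assert lin.weight is not old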
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[2].lora_B['default_0'], 140533120318048) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[2].base_layer, 140581767533168) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[2].lora_dropout, 140533119903456) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[2].lora_dropout['default_0'], 140533119896688) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].ff.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].ff.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].ff.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].ff.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].ff.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].ff.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[3].ff.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
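The merged_adapters and _disable_adapters guards come from the BaseTunerLayer properties that the forward path consults on every call. Paraphrased from the guarded locations (peft/tuners/tuners_utils.py:506-516; class and attribute defaults below are a sketch, not the peft source verbatim):

    class BaseTunerLayerSketch:
        merged_adapters: list = []        # LENGTH_CHECK: not merged_adapters
        _disable_adapters: bool = False   # ID_MATCH against the False singleton
        _active_adapter = ["default_0"]   # one shared list -> TENSOR_ALIASING guards

        @property
        def merged(self) -> bool:
            return bool(self.merged_adapters)

        @property
        def disable_adapters(self) -> bool:
            return self._disable_adapters

        @property
        def active_adapter(self):
            return self._active_adapter

The TENSOR_ALIASING entries record that each layer's _active_adapter is literally the same list object as transformer_blocks[0].norm1.linear's, so one guard per layer asserts that this sharing still holds.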
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn, accessed_by=DictGetItemGuardAccessor(attn)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn, 140581767532016) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_k, 140533121546704) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_k.lora_A, 140533121536336) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_k.lora_A['default_0'], 140533121532880) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_k.lora_A['default_0'].weight, 140531603225664) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_k.lora_B, 140533121536960) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_k.lora_B['default_0'], 140533121544688) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_k.base_layer, 140581767532160) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
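This per-layer fan-out repeats for to_q, to_v, and every other LoRA target, which is what makes guard evaluation expensive here. One common mitigation, sketched under the assumption that the model was loaded through a diffusers pipeline (`pipe` below is assumed to exist; the repo id is hypothetical): merge the LoRA deltas into the base weights before compiling, so the peft branches and their guards disappear from forward().

    pipe.load_lora_weights("user/some-flux-lora")  # hypothetical LoRA checkpoint
    pipe.fuse_lora()              # folds lora_B @ lora_A * scaling into the base weights
    pipe.unload_lora_weights()    # removes the adapter modules themselves
    pipe.transformer = torch.compile(pipe.transformer)

The trade-off is that a fused adapter can no longer be toggled or re-scaled without reloading it.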
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_k.lora_dropout, 140533121537440) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_k.lora_dropout['default_0'], 140533121545888) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
[__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[3].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | 
| | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_q, 140533121559392) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- 
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_q.lora_A, 140533121563856) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_q.lora_A['default_0'], 140533121560256) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_q.lora_A['default_0'].weight, 140531603212624) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_q.lora_B, 140533121564240) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_q.lora_B['default_0'], 140533121557184) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_q.base_layer, 140581767532256) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_q.lora_dropout, 140533121562224) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_q.lora_dropout['default_0'], 140533121549984) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[3].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_v, 140533121589328) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_v.lora_A, 140533121589616) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_v.lora_A['default_0'], 140533121593984) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_v.lora_A['default_0'].weight, 140531603223344) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_v.lora_B, 140533121589472) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_v.lora_B['default_0'], 140533121593600) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_v.base_layer, 140581767532352) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_v.lora_dropout, 140533121596672) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_v.lora_dropout['default_0'], 140533121588704) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[3].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.norm_k, 140581767532208) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.norm_k.weight, 140581766002016) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.norm_q, 140581767532112) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.norm_q.weight, 140581766002096) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_out, 140581767532544) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_out[0], 140533120043664) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].attn.to_out[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_out[0].training, 140591004393408) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[0].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_out[0].lora_A, 140533120050720) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[0].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[0].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_out[0].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[0].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_out[0].lora_A['default_0'], 140533119974944) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[0].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[0].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_out[0].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[0].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[0].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_out[0].lora_A['default_0'].weight, 140531602526576) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[0].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_out[0].lora_B, 140533120050864) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[0].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[0].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_out[0].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[0].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_out[0].lora_B['default_0'], 140533119972112) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[0].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[0].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_out[0].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[0].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_out[0].base_layer, 140581767532592) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[0].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[0].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_out[0].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[0].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_out[0].lora_dropout, 140533120055232) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[0].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[0].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_out[0].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[0].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_out[0].lora_dropout['default_0'], 140533120054752) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[0].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[0].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_out[0].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[0].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].attn.to_out[0].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].attn.to_out[0].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[0].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].attn.to_out[0].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[0].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].attn.to_out[0].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].attn.to_out[0].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[0].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_out[0].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[0].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].attn.to_out[0].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
[__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[3].attn.to_out[0].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[0]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_out[0]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[0]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[1], accessed_by=GetItemGuardAccessor(1) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_out[1], 140581767532640) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # 
diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_k_proj, 140533121597200) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].attn.add_k_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_k_proj.training, 140591004393408) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_k_proj.lora_A, 140533121590144) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_k_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | 
+- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_k_proj.lora_A['default_0'], 140533121596960) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_k_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_k_proj.lora_A['default_0'].weight, 140531603225904) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_k_proj.lora_B, 140533121595664) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_k_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_k_proj.lora_B['default_0'], 140533121595760) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_k_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_k_proj.base_layer, 140581767532400) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_k_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_k_proj.lora_dropout, 140533121595184) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.lora_dropout.__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_k_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_k_proj.lora_dropout['default_0'], 140533121586736) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_k_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].attn.add_k_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].attn.add_k_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].attn.add_k_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | 
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].attn.add_k_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].attn.add_k_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_k_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].attn.add_k_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[3].attn.add_k_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_k_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] 
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_k_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_q_proj, 140533121596768) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].attn.add_q_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_q_proj.training, 140591004393408) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[3].attn.add_q_proj.lora_A, 140533120055088) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_q_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_q_proj.lora_A['default_0'], 140533120050768) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_q_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_q_proj.lora_A['default_0'].weight, 140531602528496) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_q_proj.lora_B, 140533120046880) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_q_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_q_proj.lora_B['default_0'], 140533120055184) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_q_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_q_proj.base_layer, 140581767532496) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) 
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_q_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_q_proj.lora_dropout, 140533121590864) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_q_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_q_proj.lora_dropout['default_0'], 140533121584720) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_q_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: 
___check_type_id(L['self'].transformer_blocks[3].attn.add_q_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].attn.add_q_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].attn.add_q_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].attn.add_q_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].attn.add_q_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_q_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].attn.add_q_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | 
| | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[3].attn.add_q_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_q_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_q_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_v_proj, 140533121583136) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].attn.add_v_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[3].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_v_proj.training, 140591004393408) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_v_proj.lora_A, 140533121584480) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_v_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_v_proj.lora_A['default_0'], 140533121594704) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_v_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_v_proj.lora_A['default_0'].weight, 140531603221184) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_v_proj.lora_B, 140533121596384) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_v_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_v_proj.lora_B['default_0'], 140533121596000) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_v_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_v_proj.base_layer, 140581767532448) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_v_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_v_proj.lora_dropout, 140533121591392) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_v_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_v_proj.lora_dropout['default_0'], 140533121584048) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_v_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].attn.add_v_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].attn.add_v_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].attn.add_v_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].attn.add_v_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].attn.add_v_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_v_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].attn.add_v_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[3].attn.add_v_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.add_v_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.add_v_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
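
The block of guards above specializes the compiled graph on every attribute that PEFT's LoRA Linear.forward reads: the lora_A/lora_B ModuleDicts and their 'default_0' entries, base_layer, lora_dropout, scaling, use_dora, the hook dicts, merged_adapters, _disable_adapters, and _active_adapter. Stitching together the source lines the guards quote (peft/tuners/lora/layer.py:557-568) gives roughly the following forward path. This is a minimal sketch for reading the guard tree, not the verbatim PEFT implementation; the single adapter name 'default_0' comes from the log itself.

    # Minimal sketch of peft/tuners/lora/layer.py:557-568 (Linear.forward),
    # reconstructed from the source lines quoted in the guards above.
    def lora_linear_forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)         # layer.py:557
        for active_adapter in self.active_adapters:          # e.g. ['default_0']
            if active_adapter not in self.lora_A.keys():     # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]             # layer.py:562
            lora_B = self.lora_B[active_adapter]             # layer.py:563
            dropout = self.lora_dropout[active_adapter]      # layer.py:564
            scaling = self.scaling[active_adapter]           # layer.py:565, guarded EQUALS_MATCH == 1.0
            x = x.to(lora_A.weight.dtype)                    # layer.py:566, hence the weight ID_MATCH
            if not self.use_dora[active_adapter]:            # layer.py:568, guarded as False
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Every attribute read and dict lookup in that path turns into a TYPE_MATCH, DICT_LENGTH, ID_MATCH, or EQUALS_MATCH entry, which is why each LoRA-wrapped Linear contributes several dozen guards to the tree.
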
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_add_out, 140533119962176) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].attn.to_add_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_add_out.training, 140591004393408) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_add_out.lora_A, 140533120444848) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_add_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_add_out.lora_A['default_0'], 140533120440720) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_add_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_add_out.lora_A['default_0'].weight, 140531602528176) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_add_out.lora_B, 140533120448064) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_add_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_add_out.lora_B['default_0'], 140533120438368) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_add_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_add_out.base_layer, 140581767532688) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_add_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_add_out.lora_dropout, 140533119963616) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_add_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_add_out.lora_dropout['default_0'], 140533119961600) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_add_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].attn.to_add_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].attn.to_add_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].attn.to_add_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].attn.to_add_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].attn.to_add_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_add_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].attn.to_add_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[3].attn.to_add_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.to_add_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.to_add_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
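
The TENSOR_ALIASING entries assert object identity rather than tensor values: each LoRA layer's _active_adapter must be literally the same object as transformer_blocks[0].norm1.linear._active_adapter, which holds when PEFT installs a single adapter across the whole model. A rough Python rendering of what these guard kinds reduce to; the real checks are compiled accessors inside torch._dynamo's guard manager, so this is illustrative only:

    # Illustrative reductions of the guard kinds seen in this dump.
    def id_match(obj, expected_id):
        # ID_MATCH / ___check_obj_id: the attribute must still be the
        # exact same Python object it was at compile time.
        return id(obj) == expected_id

    def type_match(obj, expected_type_id):
        # TYPE_MATCH / ___check_type_id: same check applied to type(obj).
        return id(type(obj)) == expected_type_id

    def aliasing_guard(a, b):
        # TENSOR_ALIASING-style guard: two sources must resolve to one object.
        return a is b

If any of these identities changes, for example because a new adapter object is set on the model, the guard set fails and Dynamo recompiles the frame.
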
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.norm_added_k, 140581767532832) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.norm_added_k.weight, 140581766001856) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.norm_added_q, 140581767532736) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.norm_added_q.weight, 140581766001936) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].attn.processor, 140581767531968) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
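
Scalar attributes get a different treatment from modules: norm_added_k.eps / norm_added_q.eps (1e-06) and attn.heads (24) are read as plain Python numbers, so Dynamo burns them into the compiled graph as constants and emits EQUALS_MATCH guards on them; head_dim = inner_dim // attn.heads is folded at compile time. A simplified sketch of the RMS-norm step those eps guards feed, following the lines the guards quote from diffusers/src/diffusers/models/normalization.py:428-430 (the variance computation is assumed from the standard RMSNorm recipe, not taken from the log):

    import torch

    def rms_norm(hidden_states, weight, eps=1e-06):
        # assumed: mean of squares over the last dim, per standard RMSNorm
        variance = hidden_states.pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + eps)  # normalization.py:428
        if weight is not None:                                       # normalization.py:430
            hidden_states = hidden_states * weight
        return hidden_states

The TYPE_MATCH plus ID_MATCH pair on attn.processor exists because diffusers both inspects the processor's __call__ signature and dispatches through the processor object itself, so the compiled graph is only valid for that exact processor instance.
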
source=L['self'].transformer_blocks[3].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1, accessed_by=DictGetItemGuardAccessor(norm1) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1, 140581767531536) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.norm, 140581767531680) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.silu, 140581767531584) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.linear, 140533120095024) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].norm1.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.linear.lora_A, 140533120095216) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.linear.lora_A['default_0'], 140533120100016) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.linear.lora_A['default_0'].weight, 140531603215744) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.linear.lora_B, 140533120090416) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.linear.lora_B['default_0'], 140533120090608) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.linear.base_layer, 140581767531632) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.linear.lora_dropout, 140533120098432) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.linear.lora_dropout['default_0'], 140533120101744) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.lora_dropout['default_0'].__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].norm1.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].norm1.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].norm1.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].norm1.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].norm1.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[3].norm1.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].norm1.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[3].norm1.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | 
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm2, accessed_by=DictGetItemGuardAccessor(norm2)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm2, 140581767532880) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm2.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context, 140581767533216) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net, 140581767533360) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[3].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
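The ID/TYPE/LENGTH trio on ff_context.net pins the ModuleList itself before the per-entry guards that follow. The layout those guards imply matches diffusers' FeedForward, roughly as below (a sketch consistent with the guards, not the verbatim diffusers source):

import torch.nn as nn
import torch.nn.functional as F

class GELU(nn.Module):
    # Minimal stand-in for the guarded net[0] activation (proj + gelu).
    def __init__(self, dim_in, dim_out, approximate="none"):
        super().__init__()
        self.proj = nn.Linear(dim_in, dim_out)
        self.approximate = approximate

    def forward(self, hidden_states):
        return F.gelu(self.proj(hidden_states), approximate=self.approximate)

class FeedForward(nn.Module):
    # LENGTH_CHECK len(self.net) == 3 above: [GELU(proj), Dropout, Linear];
    # net[0].proj and net[2] carry the LoRA guards listed below.
    def __init__(self, dim, inner_dim, dropout=0.0):
        super().__init__()
        self.net = nn.ModuleList([
            GELU(dim, inner_dim, approximate="tanh"),  # net[0]
            nn.Dropout(dropout),                       # net[1]
            nn.Linear(inner_dim, dim),                 # net[2]
        ])

    def forward(self, hidden_states):
        for module in self.net:  # the guarded loop, attention.py:1200
            hidden_states = module(hidden_states)
        return hidden_states

Iterating over self.net is what produces one ID_MATCH per entry: Dynamo must know exactly which module object runs at each step of the unrolled loop.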
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0], 140581767533312) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].proj, 140533120312288) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].ff_context.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_A, 140533120310512) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_A['default_0'], 140533120308688) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_A['default_0'].weight, 140531602532576) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_B, 140533120317328) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_B['default_0'], 140533120318576) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].proj.base_layer, 140581767533408) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_dropout, 140533120304656) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_dropout['default_0'], 140533120312528) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].ff_context.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].ff_context.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].ff_context.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].ff_context.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].ff_context.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].ff_context.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[3].ff_context.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
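Almost every guard in the net[0].proj subtree maps one-to-one onto a line of the PEFT LoRA forward quoted in the trailing comments (peft/tuners/lora/layer.py:557-568). Stitching those quoted lines back together gives the guarded hot path; the surrounding control flow here is a sketch, not verbatim PEFT source:

def lora_linear_forward(self, x, *args, **kwargs):
    # Reconstructed from the guard comments; line numbers taken from the log.
    result = self.base_layer(x, *args, **kwargs)        # layer.py:557
    for active_adapter in self.active_adapters:
        if active_adapter not in self.lora_A.keys():    # layer.py:560
            continue
        lora_A = self.lora_A[active_adapter]            # layer.py:562
        lora_B = self.lora_B[active_adapter]            # layer.py:563
        dropout = self.lora_dropout[active_adapter]     # layer.py:564
        scaling = self.scaling[active_adapter]          # layer.py:565
        x = x.to(lora_A.weight.dtype)                   # layer.py:566
        if not self.use_dora[active_adapter]:           # layer.py:568
            result = result + lora_B(lora_A(dropout(x))) * scaling
    return result

Every attribute touched on this path (the lora_A/lora_B ModuleDicts, the scaling dict, use_dora, the 'default_0' key, the dropout module) shows up above as its own guard, which is why a single LoRA-wrapped Linear contributes well over a dozen checks.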
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
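net[0].approximate is guarded with EQUALS_MATCH because it is a plain string attribute read inside the traced region (activations.py:83), so Dynamo burns its value into the graph. A hypothetical mini-repro of that behavior (module name M and the attribute mode are made up for illustration):

import torch
import torch.nn.functional as F

class M(torch.nn.Module):
    # A string attribute read in forward becomes a compile-time constant,
    # protected by an EQUALS_MATCH guard on the attribute's value.
    def __init__(self, mode: str = "tanh"):
        super().__init__()
        self.mode = mode

    def forward(self, x):
        return F.gelu(x, approximate=self.mode)

base = M()
compiled = torch.compile(base)
compiled(torch.randn(4))   # first compile; guard roughly: L['self'].mode == 'tanh'
base.mode = "none"
compiled(torch.randn(4))   # EQUALS_MATCH fails, so Dynamo recompiles this frame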
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[1], 140581767533504) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[2], 140533120317232) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].ff_context.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[2].lora_A, 140533120317568) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[2].lora_A['default_0'], 140533120319152) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[2].lora_A['default_0'].weight, 140531602537776) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[2].lora_B, 140533120304704) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[2].lora_B['default_0'], 140533120313920) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[2].base_layer, 140581767533552) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[2].lora_dropout, 140533120306528) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[2].lora_dropout['default_0'], 140533120317184) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].ff_context.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].ff_context.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].ff_context.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].ff_context.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].ff_context.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
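The EQUALS_MATCH == 1.0 guards on scaling['default_0'] exist because the LoRA scale is a plain Python float and Dynamo specializes on its value. In PEFT the scale is derived from the adapter config, so 1.0 simply means lora_alpha equals the rank here; the formula below is the standard PEFT definition, shown as a sketch:

import math

def lora_scale(lora_alpha: float, r: int, use_rslora: bool = False) -> float:
    # Standard PEFT scaling; rank-stabilized LoRA divides by sqrt(r) instead of r.
    return lora_alpha / (math.sqrt(r) if use_rslora else r)

assert lora_scale(lora_alpha=16, r=16) == 1.0  # example values: any config with lora_alpha == r yields the guarded 1.0

Likewise, the ID_MATCH on use_dora['default_0'] pins the False singleton, which is what lets the compiled graph take the plain-LoRA branch at layer.py:568 unconditionally.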
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].ff_context.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[3].ff_context.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].ff_context.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
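Despite the TENSOR_ALIASING label, these guards check object identity: every LoRA layer's _active_adapter must still be the very same list as the one on transformer_blocks[0].norm1.linear. A hypothetical mirror of what they assert (the transformer argument stands in for the guarded model instance, L['self'] in the guard sources):

def check_active_adapter_aliasing(transformer) -> None:
    # Identity, not equality: the guards use `is`, so replacing the shared
    # list with an equal copy would still invalidate the compiled graph.
    blocks = transformer.transformer_blocks
    shared = blocks[0].norm1.linear._active_adapter
    assert blocks[3].norm1.linear._active_adapter is shared
    assert blocks[3].ff_context.net[2]._active_adapter is shared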
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
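The norm1_context guards that follow trace diffusers' AdaLayerNormZero: a SiLU into a (LoRA-wrapped) Linear that emits six modulation tensors, five of which are returned to the caller at transformer_flux.py:167. A sketch consistent with the lines quoted in the guard comments (normalization.py:135-139), not the verbatim diffusers source:

import torch.nn as nn

class AdaLayerNormZero(nn.Module):
    def __init__(self, embedding_dim):
        super().__init__()
        self.emb = None  # guarded via ID_MATCH against the None singleton (:135)
        self.silu = nn.SiLU()
        self.linear = nn.Linear(embedding_dim, 6 * embedding_dim)
        self.norm = nn.LayerNorm(embedding_dim, elementwise_affine=False)

    def forward(self, x, emb):
        emb = self.linear(self.silu(emb))  # normalization.py:137
        shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = emb.chunk(6, dim=1)
        x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # normalization.py:139
        # five outputs, unpacked at transformer_flux.py:167
        return x, gate_msa, shift_mlp, scale_mlp, gate_mlp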
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context, 140581767531728) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.norm, 140581767531920) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.silu, 140581767531824) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.linear, 140533121719152) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[3].norm1_context.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.linear.lora_A, 140533121717040) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.linear.lora_A['default_0'], 140533121560352) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.linear.lora_A['default_0'].weight, 140531603217024) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.linear.lora_B, 140533121728080) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.linear.lora_B['default_0'], 140533121558720) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.linear.base_layer, 140581767531872) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.linear.lora_dropout, 140533121718912) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.linear.lora_dropout['default_0'], 140533121718816) # 
dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].norm1_context.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].norm1_context.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[3].norm1_context.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].norm1_context.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[3].norm1_context.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[3].norm1_context.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[3].norm1_context.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm1_context.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[3].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].transformer_blocks[3].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm2_context, 140581767532928) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[3].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[3]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[4], accessed_by=GetItemGuardAccessor(4) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4], 140581767531200) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff, accessed_by=DictGetItemGuardAccessor(ff) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff, 140581767534848) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net, 140581767535088) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[4].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0], 140581767535040) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0]._modules, 
accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].proj, 140581770785696) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].ff.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].proj.lora_A, 140533121779792) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[4].ff.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].proj.lora_A['default_0'], 140533121778784) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].proj.lora_A['default_0'].weight, 140537662768048) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].proj.lora_B, 140533121784880) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # 
peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].proj.lora_B['default_0'], 140533121780080) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].proj.base_layer, 140581767535136) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].proj.lora_dropout, 140533121790112) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].proj.lora_dropout['default_0'], 140533121783008) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].ff.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].ff.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- 
EQUALS_MATCH: L['self'].transformer_blocks[4].ff.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].ff.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].ff.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].ff.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[4].ff.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in 
disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[1], 140581767535184) # for module in self.net: # 
diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[2], 140533121782624) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].ff.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[2].lora_A, 140533121784352) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | 
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[2].lora_A['default_0'], 140533120767872) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[2].lora_A['default_0'].weight, 140533143162688) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[2].lora_B, 140533121790976) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[2].lora_B['default_0'], 140533120762208) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[2].base_layer, 140581767535232) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[2].lora_dropout, 140533121787472) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[2].lora_dropout['default_0'], 140533121792704) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].ff.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].ff.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].ff.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].ff.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].ff.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].ff.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[4].ff.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn, 140581767534080) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_k, 140533121383584) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_k.lora_A, 140533121798624) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_k.lora_A['default_0'], 140533121807120) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_k.lora_A['default_0'].weight, 140533076431616) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_k.lora_B, 140533121794400) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_k.lora_B['default_0'], 140533121804480) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_k.base_layer, 140581767534224) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_k.lora_dropout, 140533121805536) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_k.lora_dropout['default_0'], 140533121794592) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[4].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_q, 140533120834512) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_q.lora_A, 140533121383872) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_q.lora_A['default_0'], 140533121382432) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_q.lora_A['default_0'].weight, 140533076436016) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_q.lora_B, 140533121380320) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_q.lora_B['default_0'], 140533121374416) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_q.base_layer, 140581767534320) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_q.lora_dropout, 140533120840176) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_q.lora_dropout['default_0'], 140533120833264) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[4].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_v, 140533121794640) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_v.lora_A, 140533121798144) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_v.lora_A['default_0'], 140533121805824) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_v.lora_A['default_0'].weight, 140533076442496) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_v.lora_A['default_0'].weight, 140533076442496) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_v.lora_B, 140533121806736) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_v.lora_B['default_0'], 140533121796560) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_v.base_layer, 140581767534416) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_v.lora_dropout, 140533121798576) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_v.lora_dropout['default_0'], 140533121809904) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[4].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
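The guards above close out the to_v LoRA layer: Dynamo pins not just the weights but all of PEFT's Python bookkeeping, including the dict layouts (TYPE_MATCH, DICT_LENGTH), the scaling value (EQUALS_MATCH == 1.0), the use_dora flags, the empty merged_adapters list, and _disable_adapters. Mutating any of these after compilation invalidates this cache entry. A minimal sketch of the same specialization, using a toy module rather than the Flux transformer (the names Toy, lin, and scaling are illustrative, not from diffusers or peft):

    import torch

    class Toy(torch.nn.Module):
        def __init__(self):
            super().__init__()
            self.lin = torch.nn.Linear(8, 8)
            # plain dict attribute, analogous to peft's LoraLayer.scaling
            self.scaling = {"default_0": 1.0}

        def forward(self, x):
            # reading a Python float out of a dict makes Dynamo install
            # EQUALS_MATCH (== 1.0) and DICT_LENGTH guards like those above
            return self.lin(x) * self.scaling["default_0"]

    m = Toy()
    cm = torch.compile(m, backend="eager")
    x = torch.randn(2, 8)
    cm(x)                         # first compile
    m.scaling["default_0"] = 0.5  # e.g. what changing a LoRA scale does
    cm(x)                         # EQUALS_MATCH guard fails -> recompile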
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.norm_k, 140581767534272) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.norm_k.weight, 140581766004096) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.norm_q, 140581767534176) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.norm_q.weight, 140581783273312) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
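The norm_k and norm_q subtrees show the per-module boilerplate that every nn.Module contributes: an ID_MATCH on the module and on its .training flag (checked against the CPython bool singletons), an EQUALS_MATCH that specializes the Python float eps == 1e-06 read inside the RMSNorm forward, and a DICT_CONTAINS guard asserting that no instance-level 'forward' shadows the class method. A small sketch of those last two guard kinds, assuming a stock LayerNorm as a stand-in for the diffusers RMSNorm (eps defaults to 1e-05 there):

    import torch

    norm = torch.nn.LayerNorm(8)          # stand-in for the RMSNorm layers above
    cnorm = torch.compile(norm, backend="eager")
    x = torch.randn(2, 8)
    cnorm(x)   # guards include EQUALS_MATCH on norm.eps == 1e-05 and
               # DICT_CONTAINS: not ___dict_contains('forward', norm.__dict__)
    norm.forward = lambda t: t * 2         # instance attribute shadows the class forward
    cnorm(x)   # DICT_CONTAINS guard fails -> Dynamo recompiles through the new forward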
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out, 140581767534608) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[0], 140533121320704) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].attn.to_out[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[0].training, 140591004393408) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[0].lora_A, 140533119886784) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[0].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[0].lora_A['default_0'], 140533119891488) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[0].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[0].lora_A['default_0'].weight, 140537662774128) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[0].lora_B, 140533119891248) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[0].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[0].lora_B['default_0'], 140533119885344) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[0].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[0].base_layer, 140581767534656) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[0].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[0].lora_dropout, 140533121334192) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[0].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[0].lora_dropout['default_0'], 140533121322672) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[0].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.to_out[0].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].attn.to_out[0].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].attn.to_out[0].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.to_out[0].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].attn.to_out[0].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[0].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.to_out[0].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[4].attn.to_out[0].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[0]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[0]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[1], 140581767534704) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
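Note the TENSOR_ALIASING guards, each printed twice per attribute in this dump: every LoRA layer's _active_adapter is the same Python object as transformer_blocks[0].norm1.linear._active_adapter, so Dynamo guards on identity (is), not on equality. Rebinding any one layer's _active_adapter to an equal but distinct object would fail the guard. The same class of guard is easiest to reproduce with tensor arguments; a minimal sketch (f is illustrative):

    import torch

    def f(a, b):
        return a + b

    cf = torch.compile(f, backend="eager")
    t = torch.randn(4)
    cf(t, t)           # compiled with an aliasing guard: a is b
    cf(t, t.clone())   # same values, different objects -> aliasing guard fails, recompile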
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_k_proj, 140533121810288) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].attn.add_k_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_k_proj.training, 140591004393408) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_k_proj.lora_A, 140533121808416) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_k_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_k_proj.lora_A['default_0'], 140533120704112) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_k_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_k_proj.lora_A['default_0'].weight, 140533076435376) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_k_proj.lora_B, 140533121807024) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_k_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_k_proj.lora_B['default_0'], 140533120701664) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_k_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_k_proj.base_layer, 140581767534464) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_k_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_k_proj.lora_dropout, 140533121808368) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_k_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_k_proj.lora_dropout['default_0'], 140533121807312) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_k_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.add_k_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].attn.add_k_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].attn.add_k_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.add_k_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].attn.add_k_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_k_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.add_k_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[4].attn.add_k_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_k_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_k_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
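Every LoRA-wrapped projection in every block repeats the pattern above (several dozen guards per layer), so the full TREE_GUARD_MANAGER dump runs to thousands of lines, and the [0/3] tag marks this as the fourth compiled version of frame 0 (HEAD of this log started at [0/0]). When chasing such recompilations it is usually faster to log only the failing guard than to diff full dumps. A sketch of that workflow under the stated assumptions (the step function is illustrative, and on this PyTorch build Python float inputs specialize, as the EQUALS_MATCH guards above show):

    import torch
    import torch._dynamo

    # Run with TORCH_LOGS="recompiles" in the environment to print just the
    # guard that failed, instead of reading the whole guard tree:
    #   TORCH_LOGS="recompiles" python repro.py
    # Dynamo keeps a limited number of compiled variants per frame:
    torch._dynamo.config.cache_size_limit = 16   # default is 8 in recent releases

    @torch.compile(backend="eager")
    def step(x, scale):
        return x * scale

    x = torch.randn(4)
    step(x, 1.0)   # specializes on the float: EQUALS_MATCH scale == 1.0
    step(x, 0.5)   # guard fails -> second cache entry, like [0/1], [0/2], ... here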
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_q_proj, 140533121319024) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].attn.add_q_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_q_proj.training, 140591004393408) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_q_proj.lora_A, 140533121323104) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_q_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_q_proj.lora_A['default_0'], 140533121333808) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_q_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_q_proj.lora_A['default_0'].weight, 140533076430656) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_q_proj.lora_B, 140533121322816) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_q_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_q_proj.lora_B['default_0'], 140533121321040) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_q_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_q_proj.base_layer, 140581767534560) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_q_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_q_proj.lora_dropout, 140533121325408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_q_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_q_proj.lora_dropout['default_0'], 140533121320464) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_q_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.add_q_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].attn.add_q_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].attn.add_q_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.add_q_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].attn.add_q_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_q_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.add_q_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | |
| | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[4].attn.add_q_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_q_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_q_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_v_proj, 140533120710544) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].attn.add_v_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[4].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_v_proj.training, 140591004393408) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_v_proj.lora_A, 140533120705216) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_v_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_v_proj.lora_A['default_0'], 140533121324784) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_v_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_v_proj.lora_A['default_0'].weight, 140533076429856) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_v_proj.lora_B, 140533120700944) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_v_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_v_proj.lora_B['default_0'], 140533121332032) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_v_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # 
peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_v_proj.base_layer, 140581767534512) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_v_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_v_proj.lora_dropout, 140533120700176) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_v_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_v_proj.lora_dropout['default_0'], 140533120702432) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[4].attn.add_v_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_v_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.add_v_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].attn.add_v_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].attn.add_v_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.add_v_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].attn.add_v_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_v_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.add_v_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[4].attn.add_v_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.add_v_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.add_v_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out, 
accessed_by=DictGetItemGuardAccessor(to_add_out) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_add_out, 140533119892976) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].attn.to_add_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_add_out.training, 140591004393408) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_add_out.lora_A, 140533119891296) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_add_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[4].attn.to_add_out.lora_A['default_0'], 140533119886640) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_add_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_add_out.lora_A['default_0'].weight, 140537662771808) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_add_out.lora_B, 140533119893360) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_add_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_add_out.lora_B['default_0'], 140533119891872) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_add_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_add_out.base_layer, 140581767534752) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_add_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_add_out.lora_dropout, 140533119883040) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.lora_dropout.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_add_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_add_out.lora_dropout['default_0'], 140533119892592) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_add_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.to_add_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].attn.to_add_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].attn.to_add_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | 
| +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.to_add_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].attn.to_add_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_add_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.to_add_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[4].attn.to_add_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.to_add_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.to_add_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.norm_added_k, 140581767534896) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[4].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.norm_added_k.weight, 140581766003936) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.norm_added_q, 140581767534800) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: 
L['self'].transformer_blocks[4].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.norm_added_q.weight, 140581766004016) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].attn.processor, 140581767534032) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[4].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1, accessed_by=DictGetItemGuardAccessor(norm1)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1, 140581767533600) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.norm, 140581767533744) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.silu, 140581767533648) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.linear, 140533120319392) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].norm1.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.linear.lora_A, 140533120572464) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.linear.lora_A['default_0'], 140533120840608) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.linear.lora_A['default_0'].weight, 140533076438656) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.linear.lora_B, 140533120571168) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.linear.lora_B['default_0'], 140533120838016) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.linear.base_layer, 140581767533696) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.linear.lora_dropout, 140533120318864) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.linear.lora_dropout['default_0'], 140533120309264) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].norm1.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].norm1.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].norm1.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].norm1.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].norm1.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].norm1.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[4].norm1.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
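
The subtree above pins down the PEFT LoRA wrapper around norm1.linear. For orientation, here is a minimal sketch of the forward path these guards trace, reconstructed from the peft/tuners/lora/layer.py lines quoted in the guard comments (simplified; the loop body and the exact accumulation are assumptions, not the verbatim peft implementation):

    # Hypothetical reconstruction of peft lora.Linear.forward (layer.py:557-568).
    def forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)       # :557 -> ID_MATCH on base_layer
        for active_adapter in self.active_adapters:        # -> TENSOR_ALIASING on shared _active_adapter
            if active_adapter not in self.lora_A.keys():   # :560 -> ID_MATCH on the lora_A ModuleDict
                continue
            lora_A = self.lora_A[active_adapter]           # :562 -> ID_MATCH on lora_A['default_0']
            lora_B = self.lora_B[active_adapter]           # :563
            dropout = self.lora_dropout[active_adapter]    # :564
            scaling = self.scaling[active_adapter]         # :565 -> DICT_LENGTH == 1, EQUALS_MATCH == 1.0
            x = x.to(lora_A.weight.dtype)                  # :566 -> ID_MATCH on the weight Parameter
            if not self.use_dora[active_adapter]:          # :568 -> ID_MATCH on use_dora['default_0']
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Every attribute and dict lookup in this path is why each wrapped linear contributes its own block of ID_MATCH/TYPE_MATCH/DICT_LENGTH/EQUALS_MATCH guards; the same pattern repeats below for ff_context.net[0].proj and ff_context.net[2].
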
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
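
norm1 itself is diffusers' AdaLayerNormZero. A minimal sketch of its forward, reconstructed from the normalization.py:135-139 lines quoted by the guards (the 6-way chunk is an assumption inferred from the five values unpacked at transformer_flux.py:165):

    # Hypothetical reconstruction of AdaLayerNormZero.forward (normalization.py:135-139).
    def forward(self, x, emb=None):
        # self.emb is None in this trace; the ID_MATCH on norm1.emb freezes the
        # "if self.emb is not None:" branch at :135 so it is compiled away.
        emb = self.linear(self.silu(emb))  # :137, linear is the LoRA wrapper guarded above
        shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = emb.chunk(6, dim=1)
        x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # :139
        return x, gate_msa, shift_mlp, scale_mlp, gate_mlp
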
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm2, accessed_by=DictGetItemGuardAccessor(norm2)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm2, 140581767534944) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm2.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context, 140581767535280) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net, 140581767535424) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[4].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0], 140581767535376) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].proj, 140533120774880) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].ff_context.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_A, 140533120774304) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_A['default_0'], 140533121754176) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_A['default_0'].weight, 140533143172208) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_B, 140533120767152) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_B['default_0'], 140533121755424) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].proj.base_layer, 140581767535472) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_dropout, 140533120772960) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_dropout['default_0'], 140533120762256) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].ff_context.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].ff_context.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].ff_context.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].ff_context.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].ff_context.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].ff_context.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[4].ff_context.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
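
net[0] is diffusers' GELU projection; its guards pin proj (another LoRA-wrapped linear) plus the approximate string, which EQUALS_MATCH fixes to 'tanh'. A minimal sketch from the quoted activations.py:83-88 lines (net is length-checked to 3; in diffusers' FeedForward the remaining entries are typically a Dropout and an output Linear, iterated by the `for module in self.net:` loop at attention.py:1200):

    # Hypothetical reconstruction of diffusers GELU (net[0]).
    import torch.nn.functional as F

    def gelu(self, gate):
        return F.gelu(gate, approximate=self.approximate)  # :83, approximate == 'tanh'

    def forward(self, hidden_states):
        hidden_states = self.proj(hidden_states)           # :88, proj is LoRA-wrapped
        return self.gelu(hidden_states)
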
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[1], 140581767535568) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[2], 140533121757440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].ff_context.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[2].lora_A, 140533121760464) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[2].lora_A['default_0'], 140533121755328) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[2].lora_A['default_0'].weight, 140533141136752) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[2].lora_B, 140533121746544) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[2].lora_B['default_0'], 140533121757776) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[2].base_layer, 140581767535616) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[2].lora_dropout, 140533121750960) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[2].lora_dropout['default_0'], 140533121745920) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].ff_context.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].ff_context.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].ff_context.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].ff_context.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].ff_context.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].ff_context.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[4].ff_context.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].ff_context.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[4].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context, 140581767533792) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[4].norm1_context.norm, 140581767533984) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.silu, 140581767533888) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.linear, 140533120841472) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[4].norm1_context.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.linear.lora_A, 140533120841184) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.linear.lora_A['default_0'], 140533120842720) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.linear.lora_A['default_0'].training, 140591004393408) # lora_A = 
self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.linear.lora_A['default_0'].weight, 140533076431696) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.linear.lora_B, 140533120831824) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.linear.lora_B['default_0'], 140533120831968) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.linear.base_layer, 140581767533936) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.linear.lora_dropout, 140533120839696) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.linear.lora_dropout['default_0'], 140533120828656) # 
dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].norm1_context.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].norm1_context.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[4].norm1_context.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].norm1_context.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[4].norm1_context.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[4].norm1_context.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[4].norm1_context.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm1_context.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[4].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].transformer_blocks[4].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm2_context, 140581767534992) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[4].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[4]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[5], accessed_by=GetItemGuardAccessor(5) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5], 140581767533264) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff, accessed_by=DictGetItemGuardAccessor(ff) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff, 140581767536912) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net, 140581767537152) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[5].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0], 140581767537104) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0]._modules, 
accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].proj, 140533120810672) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].ff.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].proj.lora_A, 140533120805104) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[5].ff.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].proj.lora_A['default_0'], 140533120810912) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].proj.lora_A['default_0'].weight, 140537665345376) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].proj.lora_B, 140533120810336) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # 
peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].proj.lora_B['default_0'], 140533120810144) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].proj.base_layer, 140581767537200) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].proj.lora_dropout, 140533120806592) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].proj.lora_dropout['default_0'], 140533120799008) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].ff.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].ff.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].ff.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].ff.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].ff.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].ff.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[5].ff.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
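
The EQUALS_MATCH guard on proj.scaling['default_0'] == 1.0 above means the compiled graph is specialized on the LoRA scaling factor as a plain Python float. A minimal sketch of the same effect on a toy module (hypothetical code, not the peft implementation):

    import torch

    class Toy(torch.nn.Module):
        def __init__(self):
            super().__init__()
            self.lin = torch.nn.Linear(4, 4)
            self.scaling = {"default_0": 1.0}  # stand-in for peft's LoraLayer.scaling

        def forward(self, x):
            # reading this float during tracing is what installs an EQUALS_MATCH guard
            return self.lin(x) * self.scaling["default_0"]

    m = Toy()
    cm = torch.compile(m)
    x = torch.randn(2, 4)
    cm(x)                         # first call: compile, guard scaling["default_0"] == 1.0
    m.scaling["default_0"] = 0.5  # change the guarded value...
    cm(x)                         # ...the guard fails and Dynamo recompiles this frame

So running the same pipeline with a different lora_scale or adapter weight should be expected to fail these guards and trigger a recompile.
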
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[1], accessed_by=GetItemGuardAccessor(1)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[1], 140581767537248) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2], accessed_by=GetItemGuardAccessor(2)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[2], 140533120806352) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].ff.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[2].lora_A, 140533120795792) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[2].lora_A['default_0'], 140533120689936) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[2].lora_A['default_0'].weight, 140537665350896) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[2].lora_B, 140533120801264) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[2].lora_B['default_0'], 140533120682304) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[2].base_layer, 140581767537296) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[2].lora_dropout, 140533120805680) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[2].lora_dropout['default_0'], 140533120802848) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].ff.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].ff.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].ff.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].ff.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].ff.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].ff.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[5].ff.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn, accessed_by=DictGetItemGuardAccessor(attn)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn, 140581767536144) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
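
The paired TENSOR_ALIASING guards above assert object identity: every LoRA layer must keep sharing the exact _active_adapter object with transformer_blocks[0].norm1.linear, so rebinding that attribute anywhere in the model would invalidate the cached graph. A dump like this one can be reproduced through Dynamo's logging artifacts; a sketch, assuming a recent PyTorch where these artifact names exist:

    import torch

    # equivalent to running with TORCH_LOGS="guards,recompiles"; prints the
    # TREE_GUARD_MANAGER tree and the failing guard on each recompilation
    torch._logging.set_logs(guards=True, recompiles=True)
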
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_k, 140533121829664) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_k.lora_A, 140533121831440) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_k.lora_A['default_0'], 140533121839216) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_k.lora_A['default_0'].weight, 140537662675024) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_k.lora_B, 140533121829136) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_k.lora_B['default_0'], 140533121832928) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_k.base_layer, 140581767536288) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_k.lora_dropout, 140533121834944) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_k.lora_dropout['default_0'], 140533121836192) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[5].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_q, 140533121842864) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_q.lora_A, 140533121830816) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_q.lora_A['default_0'], 140533121839168) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_q.lora_A['default_0'].weight, 140537662672864) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_q.lora_B, 140533121842912) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_q.lora_B['default_0'], 140533121826880) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_q.base_layer, 140581767536384) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_q.lora_dropout, 140533121831344) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_q.lora_dropout['default_0'], 140533121829904) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
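
Note that this same block of guards (ID_MATCH on the wrapped module and its lora_A/lora_B/lora_dropout dicts, EQUALS_MATCH on scaling, the use_dora and merged_adapters checks, and the _active_adapter aliasing) is emitted once per LoRA-wrapped Linear, so guard-evaluation cost grows with the number of adapted layers. If the adapter does not need to stay hot-swappable, merging it into the base weights before compiling removes the peft wrappers from the traced code entirely. A hedged diffusers-style sketch, assuming pipe is the Flux pipeline this log came from (not shown in the excerpt) and the LoRA id is a placeholder:

    import torch

    pipe.load_lora_weights("path/or/repo-id", adapter_name="default_0")  # placeholder id
    pipe.fuse_lora()             # fold the LoRA deltas into the base weights
    pipe.unload_lora_weights()   # drop the peft wrappers, keeping the fused weights
    pipe.transformer = torch.compile(pipe.transformer)  # now traces plain nn.Linear
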
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[5].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | 
| | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_v, 140533121840080) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 
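Every source comment in the to_q subtree above points at the same short stretch of peft/tuners/lora/layer.py (lines 557-568). For orientation, here is a minimal sketch of that forward path, paraphrased from the lines quoted in the guards rather than copied from PEFT, so treat names and control flow as approximate:

```python
import torch
import torch.nn as nn

class LoraLinearSketch(nn.Module):
    """Rough shape of peft.tuners.lora.layer.Linear.forward as reflected by
    the guards above (layer.py:557-568); a paraphrase, not the PEFT code."""

    def __init__(self, in_features=8, out_features=8, r=4, adapter="default_0"):
        super().__init__()
        self.base_layer = nn.Linear(in_features, out_features)
        self.lora_A = nn.ModuleDict({adapter: nn.Linear(in_features, r, bias=False)})
        self.lora_B = nn.ModuleDict({adapter: nn.Linear(r, out_features, bias=False)})
        self.lora_dropout = nn.ModuleDict({adapter: nn.Identity()})
        self.scaling = {adapter: 1.0}     # plain dict -> TYPE_MATCH / DICT_LENGTH / EQUALS_MATCH guards
        self.use_dora = {adapter: False}  # plain dict -> ID_MATCH on the bool singleton
        self.active_adapters = [adapter]

    def forward(self, x):
        result = self.base_layer(x)                       # layer.py:557
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():  # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]          # layer.py:562
            lora_B = self.lora_B[active_adapter]          # layer.py:563
            dropout = self.lora_dropout[active_adapter]   # layer.py:564
            scaling = self.scaling[active_adapter]        # layer.py:565
            x = x.to(lora_A.weight.dtype)                 # layer.py:566
            if not self.use_dora[active_adapter]:         # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result
```

Every attribute this path touches (the lora_A/lora_B ModuleDicts, base_layer, lora_dropout, the plain scaling/use_dora dicts, plus merged_adapters, _disable_adapters and _active_adapter from the surrounding property checks) gets its own guard, which is why the same block of roughly forty guards repeats for every LoRA-wrapped projection in the model, to_v being next.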
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_v, 140533121840080) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_v.lora_A, 140533121835904) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_v.lora_A['default_0'], 140533121834320) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_v.lora_A['default_0'].weight, 140537662681904) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_v.lora_B, 140533121834896) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_v.lora_B['default_0'], 140533121839888) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_v.base_layer, 140581767536480) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_v.lora_dropout, 140533121829568) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_v.lora_dropout['default_0'], 140533121837872) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[5].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
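Note that scaling['default_0'] is guarded by value (EQUALS_MATCH ... == 1.0), not by identity: Dynamo specializes on the Python float and bakes it into the compiled graph. Any change to the LoRA scale therefore fails the guard and forces a recompile, consistent with this dump carrying the [0/3] tag, i.e. a fourth compilation of the same frame. A minimal illustration under stock torch.compile (the module and names below are invented for the demo):

```python
import torch

class Scaled(torch.nn.Module):
    def __init__(self):
        super().__init__()
        self.linear = torch.nn.Linear(8, 8)
        self.scaling = {"default_0": 1.0}  # plain Python float, like PEFT's scaling dict

    def forward(self, x):
        # Reading the float below is what produces the EQUALS_MATCH value guard.
        return self.linear(x) * self.scaling["default_0"]

m = torch.compile(Scaled())
x = torch.randn(2, 8)
m(x)                          # first compile; guards include scaling['default_0'] == 1.0
m.scaling["default_0"] = 0.5  # what a lora_scale / set_adapters change amounts to
m(x)                          # EQUALS_MATCH fails -> recompile (frame tag goes [0/0] -> [0/1])
```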
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.norm_k, 140581767536336) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.norm_k.weight, 140581765824816) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.norm_q, 140581767536240) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.norm_q.weight, 140581772716176) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
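The norm_q/norm_k guards pin eps == 1e-06 with EQUALS_MATCH for the same reason as the LoRA scaling: the float is inlined as a constant into the generated kernel. The guarded lines correspond to an RMSNorm of roughly this shape (a sketch following the lines quoted from diffusers/src/diffusers/models/normalization.py:428-430; the real class also handles dtype casting):

```python
import torch

def rms_norm_sketch(hidden_states: torch.Tensor, weight=None, eps: float = 1e-6):
    variance = hidden_states.pow(2).mean(-1, keepdim=True)
    hidden_states = hidden_states * torch.rsqrt(variance + eps)  # normalization.py:428
    if weight is not None:                                       # normalization.py:430
        hidden_states = hidden_states * weight
    return hidden_states
```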
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out, 140581767536672) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[0], 140533120185728) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].attn.to_out[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[0].training, 140591004393408) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[0].lora_A, 140533120175120) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[0].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[0].lora_A['default_0'], 140533120806640) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[0].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[0].lora_A['default_0'].weight, 140537665354736) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[0].lora_B, 140533120179872) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[0].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[0].lora_B['default_0'], 140533120809280) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[0].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[0].base_layer, 140581767536720) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[0].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[0].lora_dropout, 140533120177664) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[0].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[0].lora_dropout['default_0'], 140533120181792) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[0].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.to_out[0].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].attn.to_out[0].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].attn.to_out[0].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.to_out[0].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].attn.to_out[0].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[0].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.to_out[0].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[5].attn.to_out[0].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[0]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[0]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[1], 140581767536768) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
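The switch from DictGetItemGuardAccessor to GetItemGuardAccessor(0)/(1) in the to_out subtree reflects indexing into an nn.ModuleList: entry 0 is the (LoRA-wrapped) output projection and entry 1 is a dropout, matching the guarded call sites at attention_processor.py:1776 and 1778. Schematically (illustrative sizes, not the actual diffusers constructor):

```python
import torch.nn as nn

inner_dim, out_dim, dropout_p = 3072, 3072, 0.0  # illustrative only

to_out = nn.ModuleList([
    nn.Linear(inner_dim, out_dim),  # to_out[0]: projection, LoRA-wrapped in this trace
    nn.Dropout(dropout_p),          # to_out[1]: parameter-free, hence only a 'training' guard
])
```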
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_k_proj, 140533121840656) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].attn.add_k_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_k_proj.training, 140591004393408) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_k_proj.lora_A, 140533121831680) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_k_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_k_proj.lora_A['default_0'], 140533120643760) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_k_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_k_proj.lora_A['default_0'].weight, 140533107400416) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_k_proj.lora_B, 140533120644048) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_k_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_k_proj.lora_B['default_0'], 140533120641504) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_k_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_k_proj.base_layer, 140581767536528) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_k_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_k_proj.lora_dropout, 140533121828272) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_k_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_k_proj.lora_dropout['default_0'], 140533121835616) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_k_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.add_k_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].attn.add_k_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].attn.add_k_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.add_k_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].attn.add_k_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_k_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.add_k_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[5].attn.add_k_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_k_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] 
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_k_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_q_proj, 140533120644144) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].attn.add_q_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_q_proj.training, 140591004393408) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[5].attn.add_q_proj.lora_A, 140533120637712) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_q_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_q_proj.lora_A['default_0'], 140533120186496) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_q_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_q_proj.lora_A['default_0'].weight, 140533107397856) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_q_proj.lora_B, 140533120181360) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_q_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_q_proj.lora_B['default_0'], 140533120181552) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_q_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_q_proj.base_layer, 140581767536624) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) 
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_q_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_q_proj.lora_dropout, 140533120642368) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_q_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_q_proj.lora_dropout['default_0'], 140533120632624) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_q_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.add_q_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].attn.add_q_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].attn.add_q_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.add_q_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].attn.add_q_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_q_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.add_q_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[5].attn.add_q_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_q_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_q_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_v_proj, 140533120644096) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].attn.add_v_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_v_proj.training, 140591004393408) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_v_proj.lora_A, 140533120647072) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_v_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_v_proj.lora_A['default_0'], 140533120642320) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_v_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_v_proj.lora_A['default_0'].weight, 140533107394336) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_v_proj.lora_B, 140533120644192) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_v_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_v_proj.lora_B['default_0'], 140533120636512) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_v_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_v_proj.base_layer, 140581767536576) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_v_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_v_proj.lora_dropout, 140533120642992) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_v_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_v_proj.lora_dropout['default_0'], 140533120640976) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_v_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.add_v_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].attn.add_v_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].attn.add_v_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.add_v_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].attn.add_v_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_v_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.add_v_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[5].attn.add_v_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.add_v_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.add_v_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_add_out, 140533120809904) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].attn.to_add_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_add_out.training, 140591004393408) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_add_out.lora_A, 140533120803808) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_add_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_add_out.lora_A['default_0'], 140533120802368) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_add_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_add_out.lora_A['default_0'].weight, 140537665341776) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_add_out.lora_B, 140533120810864) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_add_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_add_out.lora_B['default_0'], 140533120795888) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_add_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_add_out.base_layer, 140581767536816) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_add_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_add_out.lora_dropout, 140533120810624) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_add_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_add_out.lora_dropout['default_0'], 140533120803712) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_add_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.to_add_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].attn.to_add_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].attn.to_add_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | |
| +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.to_add_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].attn.to_add_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_add_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.to_add_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[5].attn.to_add_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.to_add_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.to_add_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.norm_added_k, 140581767536960) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: 
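The block above is the complete guard set for a single PEFT-wrapped projection (attn.to_add_out); every LoRA-wrapped Linear in the model receives the same battery, one guard per attribute the traced forward touched. The source comments quoted in the guards all point at the adapter dispatch in peft/tuners/lora/layer.py:557-568. A minimal sketch of that forward path, simplified from the quoted lines (single adapter, module bookkeeping elided; illustrative, not the verbatim peft implementation):

import torch
import torch.nn as nn

class LoraLinearSketch(nn.Module):
    def __init__(self, base_layer: nn.Linear, r: int = 16, lora_alpha: int = 16):
        super().__init__()
        self.base_layer = base_layer  # pinned by ID_MATCH (layer.py:557)
        self.lora_A = nn.ModuleDict({"default_0": nn.Linear(base_layer.in_features, r, bias=False)})
        self.lora_B = nn.ModuleDict({"default_0": nn.Linear(r, base_layer.out_features, bias=False)})
        self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})
        self.scaling = {"default_0": lora_alpha / r}  # EQUALS_MATCH'd as 1.0 in the log
        self.use_dora = {"default_0": False}          # ID_MATCH against the False singleton
        self.merged_adapters = []                     # LENGTH_CHECK: must stay empty
        self.active_adapters = ["default_0"]

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        result = self.base_layer(x)                        # layer.py:557
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():   # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]           # layer.py:562
            lora_B = self.lora_B[active_adapter]           # layer.py:563
            dropout = self.lora_dropout[active_adapter]    # layer.py:564
            scaling = self.scaling[active_adapter]         # layer.py:565
            x = x.to(lora_A.weight.dtype)                  # layer.py:566
            if not self.use_dora[active_adapter]:          # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Because every branch condition in this path (active adapter set, use_dora flag, merged state, scaling value) becomes its own guard, any change to the adapter configuration after compilation invalidates the graph.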
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.norm_added_k, 140581767536960) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.norm_added_k.weight, 140581772712736) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.norm_added_q, 140581767536864) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.norm_added_q.weight, 140581772711536) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].attn.processor, 140581767536096) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
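The eps == 1e-06 EQUALS_MATCH and the weight ID_MATCH above come from the RMSNorm applied to the added q/k projections. A minimal sketch of the guarded computation, built around the two quoted lines (normalization.py:428 and 430) and assuming the standard RMSNorm formulation with the variance taken as the mean of squares over the last dimension (dtype handling elided):

import torch
import torch.nn as nn

class RMSNormSketch(nn.Module):
    def __init__(self, dim: int, eps: float = 1e-6, elementwise_affine: bool = True):
        super().__init__()
        self.eps = eps  # guarded via EQUALS_MATCH: eps == 1e-06
        self.weight = nn.Parameter(torch.ones(dim)) if elementwise_affine else None

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        variance = hidden_states.pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + self.eps)  # normalization.py:428
        if self.weight is not None:                                       # normalization.py:430
            hidden_states = hidden_states * self.weight
        return hidden_states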
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1, accessed_by=DictGetItemGuardAccessor(norm1)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1, 140581767535664) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.norm, 140581767535808) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.silu, 140581767535712) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.linear, 140533121755904) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].norm1.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.linear.lora_A, 140533120134320) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.linear.lora_A['default_0'], 140533120138208) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.linear.lora_A['default_0'].weight, 140537662676704) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.linear.lora_B, 140533120135952) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.linear.lora_B['default_0'], 140533120137968) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.linear.base_layer, 140581767535760) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.linear.lora_dropout, 140533120122992) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.linear.lora_dropout['default_0'], 140533120136384) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].norm1.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].norm1.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].norm1.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].norm1.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].norm1.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].norm1.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[5].norm1.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
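Every guard kind above reduces to a cheap Python-level predicate evaluated before reusing the compiled region: ID_MATCH compares object identity, TYPE_MATCH the type's id, EQUALS_MATCH a value, DICT_LENGTH/LENGTH_CHECK a container size, and TENSOR_ALIASING that two source paths resolve to the same object. If any predicate fails on a later call, this compiled graph is rejected and Dynamo recompiles. A hand-written approximation for the norm1.linear guards (Dynamo's real GuardManager evaluates these through C++-backed accessors, so this is illustrative only):

def guards_still_valid(blk0_linear, blk5_linear) -> bool:
    # Approximation of the guard predicates logged above for
    # transformer_blocks[5].norm1.linear; not Dynamo's generated code.
    lin = blk5_linear
    return (
        lin.training is False                                    # ID_MATCH against the False singleton
        and type(lin.scaling) is dict                            # TYPE_MATCH (___check_type_id)
        and len(lin.scaling) == 1                                # DICT_LENGTH
        and lin.scaling["default_0"] == 1.0                      # EQUALS_MATCH
        and lin.use_dora["default_0"] is False                   # ID_MATCH on use_dora['default_0']
        and not lin.merged_adapters                              # LENGTH_CHECK (list must stay empty)
        and blk0_linear._active_adapter is lin._active_adapter   # TENSOR_ALIASING (same object)
    )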
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm2, accessed_by=DictGetItemGuardAccessor(norm2)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm2, 140581767537008) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm2.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
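norm1 here is diffusers' AdaLayerNormZero: the guards walk its silu, linear (LoRA-wrapped), and norm submodules, and the quoted source lines show the modulation math. A minimal sketch consistent with those quotes (normalization.py:135-139) and with the five values unpacked at transformer_flux.py:165; the 6-way chunk below is how the diffusers class splits the projected embedding, simplified here:

import torch
import torch.nn as nn

class AdaLayerNormZeroSketch(nn.Module):
    def __init__(self, embedding_dim: int):
        super().__init__()
        self.emb = None  # guarded: if self.emb is not None (normalization.py:135)
        self.silu = nn.SiLU()
        self.linear = nn.Linear(embedding_dim, 6 * embedding_dim)  # LoRA-wrapped in the log
        self.norm = nn.LayerNorm(embedding_dim, elementwise_affine=False, eps=1e-6)

    def forward(self, x: torch.Tensor, emb: torch.Tensor):
        emb = self.linear(self.silu(emb))                 # normalization.py:137
        shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = emb.chunk(6, dim=1)
        x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # normalization.py:139
        return x, gate_msa, shift_mlp, scale_mlp, gate_mlp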
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context, 140581767537344) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net, 140581767537488) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[5].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0], 140581767537440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].proj, 140533120682112) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].ff_context.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_A, 140533120683168) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_A['default_0'], 140533120690128) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_A['default_0'].weight, 140537665342416) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_B, 140533120682016) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_B['default_0'], 140533120682208) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].proj.base_layer, 140581767537536) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_dropout, 140533120684416) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_dropout['default_0'], 140533120694832) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | |
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].ff_context.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].ff_context.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].ff_context.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].ff_context.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].ff_context.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: 
___check_type_id(L['self'].transformer_blocks[5].ff_context.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[5].ff_context.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[1], 140581767537632) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[2], 140533120253376) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].ff_context.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[2].training, 140591004393408) # for 
module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[2].lora_A, 140533120251648) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[2].lora_A['default_0'], 140533120242336) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[2].lora_A['default_0'].weight, 140537665344896) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[2].lora_B, 140533120250976) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[2].lora_B['default_0'], 140533120243776) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[2].base_layer, 140581767537680) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[2].lora_dropout, 140533120244688) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[2].lora_dropout['default_0'], 140533120246656) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].ff_context.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].ff_context.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].ff_context.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].ff_context.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].ff_context.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].ff_context.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[5].ff_context.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].ff_context.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context, 140581767535856) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.norm, 140581767536048) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.silu, 140581767535952) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.linear, 140533119889856) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[5].norm1_context.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.linear.lora_A, 140533121842720) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.linear.lora_A['default_0'], 140533121841472) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.linear.lora_A['default_0'].weight, 140537662679984) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.linear.lora_B, 140533121841856) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.linear.lora_B['default_0'], 140533121840896) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.linear.base_layer, 140581767536000) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.linear.lora_dropout, 140533121835376) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.linear.lora_dropout['default_0'], 140533121842768) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].norm1_context.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].norm1_context.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[5].norm1_context.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].norm1_context.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[5].norm1_context.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
[__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[5].norm1_context.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[5].norm1_context.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm1_context.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[5].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].transformer_blocks[5].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm2_context, 140581767537056) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[5].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[5]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- 
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=L['self'].transformer_blocks[6], accessed_by=GetItemGuardAccessor(6)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6], 140581767535328) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff, accessed_by=DictGetItemGuardAccessor(ff)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff, 140581767538976) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net, accessed_by=DictGetItemGuardAccessor(net)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net, 140581767539216) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[6].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0], accessed_by=GetItemGuardAccessor(0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0], 140581767539168) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].proj, 140533120876848) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].ff.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].proj.lora_A, 140533120876656) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].proj.lora_A['default_0'], 140533120886016) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].proj.lora_A['default_0'].weight, 140537666929984) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].proj.lora_B, 140533120885296) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].proj.lora_B['default_0'], 140533120379840) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].proj.base_layer, 140581767539264) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].proj.lora_dropout, 140533120878576) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].proj.lora_dropout['default_0'], 140533120882512) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].ff.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].ff.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].ff.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].ff.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].ff.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].ff.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[6].ff.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
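
That closes ff.net[0], the gated projection: its proj layer is a PEFT lora.Linear, so the guards walk lora_A / lora_B / lora_dropout / scaling / use_dora exactly along the forward path that the comments cite from peft/tuners/lora/layer.py:557-568. A sketch of that path reconstructed from the cited lines (the loop header and the final accumulation line are assumptions; every other statement appears verbatim in the guard comments):

# Reconstruction of the LoRA Linear forward these guards protect.
def lora_linear_forward(self, x, *args, **kwargs):
    result = self.base_layer(x, *args, **kwargs)      # layer.py:557
    for active_adapter in self.active_adapters:       # assumed loop, not quoted in the log
        if active_adapter not in self.lora_A.keys():  # layer.py:560
            continue
        lora_A = self.lora_A[active_adapter]          # layer.py:562
        lora_B = self.lora_B[active_adapter]          # layer.py:563
        dropout = self.lora_dropout[active_adapter]   # layer.py:564
        scaling = self.scaling[active_adapter]        # layer.py:565
        x = x.to(lora_A.weight.dtype)                 # layer.py:566
        if not self.use_dora[active_adapter]:         # layer.py:568
            result = result + lora_B(lora_A(dropout(x))) * scaling  # assumed accumulation
    return result

Each dict lookup in that path turns into a guard: DICT_LENGTH pins the adapter dicts to a single 'default_0' entry, EQUALS_MATCH pins scaling['default_0'] == 1.0, and the use_dora ID_MATCH keeps the non-DoRA branch valid. The extra EQUALS_MATCH on net[0].approximate == 'tanh' pins the F.gelu(gate, approximate=self.approximate) call cited from diffusers activations.py:83.
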
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[1], accessed_by=GetItemGuardAccessor(1)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[1], 140581767539312) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2], accessed_by=GetItemGuardAccessor(2)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[2], 140533120380752) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].ff.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[2].lora_A, 140533120380656) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[2].lora_A['default_0'], 140533120378448) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[2].lora_A['default_0'].weight, 140542616458544) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[2].lora_B, 140533120379072) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[2].lora_B['default_0'], 140533120373600) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[2].base_layer, 140581767539360) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[2].lora_dropout, 140533120379120) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[2].lora_dropout['default_0'], 140533120380224) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].ff.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].ff.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].ff.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].ff.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].ff.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].ff.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[6].ff.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
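
With ff.net[2] done, the feed-forward subtree is fully guarded and the dump moves on to transformer_blocks[6].attn, whose to_k projection is itself a PEFT lora.Linear and repeats the same per-adapter guard pattern. For reference, a dump of this shape (RootGuardManager, nested GuardManagers, one guard per traced attribute) can be reproduced on a toy module with the guards logging artifact; this is an assumed minimal setup on a recent PyTorch, not the command that produced this log:

# Minimal sketch: print a (much smaller) TREE_GUARD_MANAGER like the one above.
import torch

torch._logging.set_logs(guards=True)  # roughly equivalent to TORCH_LOGS="guards"

class Toy(torch.nn.Module):
    def __init__(self):
        super().__init__()
        self.linear = torch.nn.Linear(8, 8)

    def forward(self, x):
        return self.linear(x)

compiled = torch.compile(Toy())
compiled(torch.randn(2, 8))  # the guard tree is dumped after this first compile

The [0/3] tag on every record here reads as frame 0, compile attempt 3, i.e. this tree appears to belong to a later recompilation of the same forward rather than the first compile.
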
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn, accessed_by=DictGetItemGuardAccessor(attn)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn, 140581767538208) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_k, 140533120188864) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_k.lora_A, 140533120197024) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_k.lora_A['default_0'], 140533120522784) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_k.lora_A['default_0'].weight, 140537668322224) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_k.lora_B, 140533120198704) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_k.lora_B['default_0'], 140533120530224) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_k.base_layer, 140581767538352) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_k.lora_dropout, 140533120202208) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_k.lora_dropout['default_0'], 140533120199472) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[6].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | |
| | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_q, 140533120135232) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_q.lora_A, 140533120203696) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_q.lora_A['default_0'], 140533120192704) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_q.lora_A['default_0'].weight, 140537668336944) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | 
| | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_q.lora_B, 140533120201248) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_q.lora_B['default_0'], 140533120199424) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_q.base_layer, 140581767538448) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[6].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_q.lora_dropout, 140533120195488) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_q.lora_dropout['default_0'], 140533120203552) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[6].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | 
| | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_v, 140533120526048) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_v.lora_A, 140533120530128) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_v.lora_A['default_0'], 140533120526672) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | 
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_v.lora_A['default_0'].weight, 140537668337024) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_v.lora_B, 140533120525616) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_v.lora_B['default_0'], 140533120520960) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_v.base_layer, 140581767538544) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_v.lora_dropout, 140533120526096) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_v.lora_dropout['default_0'], 140533120525856) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- 
TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[6].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.norm_k, 140581767538400) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.norm_k.weight, 140581772709296) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.norm_q, 140581767538304) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[6].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.norm_q.weight, 140581783065344) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out, 
accessed_by=DictGetItemGuardAccessor(to_out) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out, 140581767538736) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[0], 140533120879968) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].attn.to_out[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[0].training, 140591004393408) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[0].lora_A, 
140533120879872) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[0].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[0].lora_A['default_0'], 140533120884384) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[0].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[0].lora_A['default_0'].weight, 140537666930064) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[0].lora_B, 140533120879104) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[0].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[0].lora_B['default_0'], 140533120879776) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[0].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[0].base_layer, 140581767538784) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[0].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[0].lora_dropout, 140533120877712) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[0].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[0].lora_dropout['default_0'], 140533120876896) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[0].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.to_out[0].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].attn.to_out[0].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].attn.to_out[0].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.to_out[0].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].attn.to_out[0].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[0].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.to_out[0].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[6].attn.to_out[0].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[0]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[0]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
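Every guard in the to_out[0] subtree above points at the same short stretch of the PEFT LoRA forward. Reassembled from the inline source comments the guard printer attaches (peft/tuners/lora/layer.py:557-568), the guarded path looks roughly like the sketch below; this is a paraphrase for orientation, not a verbatim copy of the PEFT source:

    # Rough sketch of the guarded PEFT LoRA forward, reassembled from the
    # source comments printed with each guard above (layer.py line numbers).
    def forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)      # layer.py:557 -> ID_MATCH on base_layer
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():  # layer.py:560 -> ID_MATCH on lora_A
                continue
            lora_A = self.lora_A[active_adapter]          # layer.py:562
            lora_B = self.lora_B[active_adapter]          # layer.py:563
            dropout = self.lora_dropout[active_adapter]   # layer.py:564
            scaling = self.scaling[active_adapter]        # layer.py:565 -> EQUALS_MATCH == 1.0
            x = x.to(lora_A.weight.dtype)                 # layer.py:566 -> ID_MATCH on weight
            if not self.use_dora[active_adapter]:         # layer.py:568 -> ID_MATCH on use_dora['default_0']
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result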
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[1], 140581767538832) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_k_proj, 140533120251264) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].attn.add_k_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_k_proj.training, 140591004393408) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_k_proj.lora_A, 140533120891584) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_k_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_k_proj.lora_A['default_0'], 140533120888896) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_k_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_k_proj.lora_A['default_0'].weight, 140542611968288) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_k_proj.lora_B, 140533120892832) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_k_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_k_proj.lora_B['default_0'], 140533120892208) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_k_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_k_proj.base_layer, 140581767538592) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_k_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_k_proj.lora_dropout, 140533120892256) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_k_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_k_proj.lora_dropout['default_0'], 140533120884576) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_k_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.add_k_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].attn.add_k_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].attn.add_k_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.add_k_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].attn.add_k_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_k_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.add_k_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[6].attn.add_k_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_k_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_k_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
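Beyond the ID_MATCH checks on module objects, each subtree carries value guards: EQUALS_MATCH pins scaling['default_0'] to 1.0, DICT_LENGTH pins the adapter dicts to a single entry, and LENGTH_CHECK pins merged_adapters to empty. Mutating any of these after compilation (changing the LoRA scale, adding a second adapter, merging or disabling adapters) should fail the guard and trigger a recompile on the next call. A minimal, self-contained sketch of that mechanism with a toy module (a stand-in, not the diffusers/PEFT setup from this log):

    import torch
    import torch._logging

    # Print guard construction and recompile reasons, equivalent to running
    # with TORCH_LOGS="guards,recompiles" in the environment.
    torch._logging.set_logs(guards=True, recompiles=True)

    class Toy(torch.nn.Module):
        """Stand-in for a LoRA layer: `scale` plays the role of scaling['default_0']."""
        def __init__(self):
            super().__init__()
            self.linear = torch.nn.Linear(8, 8)
            self.scale = 1.0  # a plain float attribute gets an EQUALS_MATCH guard

        def forward(self, x):
            return self.linear(x) * self.scale

    m = Toy()
    compiled = torch.compile(m)
    compiled(torch.randn(2, 8))   # first call: guards installed, scale == 1.0
    m.scale = 0.5                 # mutate the guarded value...
    compiled(torch.randn(2, 8))   # ...EQUALS_MATCH fails -> recompile is logged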
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_q_proj, 140533120892304) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].attn.add_q_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_q_proj.training, 140591004393408) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_q_proj.lora_A, 140533120877040) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_q_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_q_proj.lora_A['default_0'], 140533120880208) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_q_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_q_proj.lora_A['default_0'].weight, 140537666942784) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_q_proj.lora_B, 140533120886832) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_q_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_q_proj.lora_B['default_0'], 140533120881168) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_q_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_q_proj.base_layer, 140581767538688) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_q_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_q_proj.lora_dropout, 140533120883616) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_q_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_q_proj.lora_dropout['default_0'], 140533120892352) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_q_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.add_q_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].attn.add_q_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].attn.add_q_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.add_q_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].attn.add_q_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_q_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.add_q_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[6].attn.add_q_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_q_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_q_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
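The pair of TENSOR_ALIASING lines closing each subtree records, per the printed expression, that every wrapped layer's _active_adapter is the very same object as transformer_blocks[0].norm1.linear._active_adapter, so the guard reduces to an `is` check per module rather than re-validating the contents each time. A toy illustration of the property being guarded (hypothetical stand-in classes, not the PEFT internals):

    # The aliasing the guard asserts: one shared adapter-name object,
    # referenced by every LoRA-wrapped layer.
    shared_active = ["default_0"]

    class WrappedLayer:
        def __init__(self, active):
            self._active_adapter = active  # all layers store the same object

    a = WrappedLayer(shared_active)
    b = WrappedLayer(shared_active)
    assert a._active_adapter is b._active_adapter  # the `is` relation guarded above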
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_v_proj, 140533120884720) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].attn.add_v_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_v_proj.training, 140591004393408) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_v_proj.lora_A, 140533120884144) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_v_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_v_proj.lora_A['default_0'], 140533120878000) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_v_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_v_proj.lora_A['default_0'].weight, 140542611979248) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_v_proj.lora_B, 140533120884864) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_v_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_v_proj.lora_B['default_0'], 140533120884624) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_v_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_v_proj.base_layer, 140581767538640) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_v_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_v_proj.lora_dropout, 140533120886880) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_v_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_v_proj.lora_dropout['default_0'], 140533120886448) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_v_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.add_v_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].attn.add_v_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].attn.add_v_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.add_v_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].attn.add_v_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_v_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.add_v_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[6].attn.add_v_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.add_v_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.add_v_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out, 
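Everything guarded under add_v_proj above is driven by PEFT's LoRA Linear forward. Reconstructed from the source comments the guards quote (peft/tuners/lora/layer.py:557-568), the protected code path is roughly the following sketch; it is an approximation assembled from the quoted lines, not the library code verbatim:

    import torch

    def lora_linear_forward(self, x: torch.Tensor, *args, **kwargs) -> torch.Tensor:
        # layer.py:557 -- frozen base projection (the base_layer guard subtree)
        result = self.base_layer(x, *args, **kwargs)
        for active_adapter in self.active_adapters:       # 'default_0' in this log
            # layer.py:560 -- dict membership check, hence the lora_A dict guards
            if active_adapter not in self.lora_A.keys():
                continue
            lora_A = self.lora_A[active_adapter]          # layer.py:562
            lora_B = self.lora_B[active_adapter]          # layer.py:563
            dropout = self.lora_dropout[active_adapter]   # layer.py:564
            scaling = self.scaling[active_adapter]        # layer.py:565, guarded == 1.0
            x = x.to(lora_A.weight.dtype)                 # layer.py:566, weight ID_MATCH
            if not self.use_dora[active_adapter]:         # layer.py:568, guarded False
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Each attribute touched here (base_layer, lora_A, lora_B, lora_dropout, scaling, use_dora, plus the merged_adapters / _disable_adapters / _active_adapter properties) gets its own guard subtree, which is why every LoRA-wrapped Linear in the model contributes a few dozen guards like the block above.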
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_add_out, 140533120878480) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].attn.to_add_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_add_out.training, 140591004393408) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_add_out.lora_A, 140533120878816) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_add_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_add_out.lora_A['default_0'], 140533120882848) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_add_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_add_out.lora_A['default_0'].weight, 140537666937344) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_add_out.lora_B, 140533120879536) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_add_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_add_out.lora_B['default_0'], 140533120880112) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_add_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_add_out.base_layer, 140581767538880) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_add_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_add_out.lora_dropout, 140533120877328) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_add_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_add_out.lora_dropout['default_0'], 140533120884000) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_add_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.to_add_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].attn.to_add_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].attn.to_add_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.to_add_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].attn.to_add_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_add_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.to_add_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[6].attn.to_add_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.to_add_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.to_add_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
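The guard kinds in this dump are cheap predicates evaluated on every call of the compiled function; if any one fails, Dynamo retraces and recompiles (the [0/3] tag marks the third recompile of frame 0). Their meaning, as an illustrative Python sketch rather than the C++ guard tree verbatim:

    def id_match(obj, expected_id):
        # ID_MATCH / ___check_obj_id: the exact same Python object, by identity
        return id(obj) == expected_id

    def type_match(obj, expected_type_id):
        # TYPE_MATCH / ___check_type_id: the object's class, by identity
        return id(type(obj)) == expected_type_id

    def equals_match(value, constant):
        # EQUALS_MATCH: value equality against a constant baked into the graph
        return value == constant

    def tensor_aliasing(a, b):
        # TENSOR_ALIASING: two guard sources must still resolve to one object
        return a is b

The two ids that recur in the boolean ID_MATCH lines (140591004393440 and 140591004393408) are consistent with CPython's False and True singletons, so guards on flags such as training, use_dora['default_0'] and _disable_adapters reduce to single pointer comparisons.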
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.norm_added_k, 140581767539024) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.norm_added_k.weight, 140581772709936) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.norm_added_q, 140581767538928) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.norm_added_q.weight, 140581772709216) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].attn.processor, 140581767538160) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
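The eps == 1e-06 and weight guards on norm_added_k / norm_added_q above come from the RMS normalization applied to the added key/query projections (normalization.py:428-430 as quoted). A minimal sketch of that computation; the mean-of-squares line is assumed, since only the rsqrt and weight lines are quoted in the guards:

    import torch

    def rms_norm_sketch(hidden_states: torch.Tensor, weight=None, eps: float = 1e-6):
        # Assumed: standard RMS statistics computed in float32
        variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)
        # normalization.py:428 (quoted in the guards)
        hidden_states = hidden_states * torch.rsqrt(variance + eps)
        # normalization.py:430 (quoted): optional learned scale
        if weight is not None:
            hidden_states = hidden_states.to(weight.dtype) * weight
        return hidden_states

The EQUALS_MATCH on attn.heads == 24 above plays the same role: the FLUX head count is specialized into the compiled graph via head_dim = inner_dim // attn.heads (attention_processor.py:1721).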
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1, accessed_by=DictGetItemGuardAccessor(norm1)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1, 140581767537728) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.norm, 140581767537872) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.silu, 140581767537776) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.linear, 140533120244400) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].norm1.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
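norm1 on this block is diffusers' AdaLayerNormZero: the quoted lines show a SiLU + Linear producing the modulation parameters and a LayerNorm being shifted and scaled, with five of the six chunks returned to the caller (transformer_flux.py:165). A rough reconstruction from the quoted comments; the chunk(6) split is an assumption inferred from the five returned values plus the applied shift/scale pair:

    import torch
    import torch.nn as nn

    class AdaLayerNormZeroSketch(nn.Module):
        def __init__(self, dim: int):
            super().__init__()
            self.emb = None  # guarded via ID_MATCH, consistent with 'if self.emb is not None:' staying False
            self.silu = nn.SiLU()
            self.linear = nn.Linear(dim, 6 * dim)
            self.norm = nn.LayerNorm(dim, elementwise_affine=False, eps=1e-6)

        def forward(self, x: torch.Tensor, emb: torch.Tensor):
            # normalization.py:137 (quoted): emb = self.linear(self.silu(emb))
            emb = self.linear(self.silu(emb))
            shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = emb.chunk(6, dim=1)
            # normalization.py:139 (quoted): modulate the normed activations
            x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]
            return x, gate_msa, shift_mlp, scale_mlp, gate_mlp

Note that self.linear is itself LoRA-wrapped in this trace, which is why the guards immediately below descend into norm1.linear.lora_A and friends.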
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.linear.lora_A, 140533120243728) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.linear.lora_A['default_0'], 140533120244304) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.linear.lora_A['default_0'].weight, 140537668333824) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.linear.lora_B, 140533120245312) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.linear.lora_B['default_0'], 140533120243584) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.linear.base_layer, 140581767537824) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.linear.lora_dropout, 140533120246320) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.linear.lora_dropout['default_0'], 140533120246272) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].norm1.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].norm1.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].norm1.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].norm1.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].norm1.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
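Dumps like this one can be reproduced and bounded without touching library code. A plausible setup, assuming a recent PyTorch 2.x where torch._logging.set_logs accepts the guards and recompiles flags:

    import torch
    import torch._dynamo as dynamo

    # Equivalent to TORCH_LOGS="guards,recompiles" in the environment:
    # print the TREE_GUARD_MANAGER dump and the reason for each recompile.
    torch._logging.set_logs(guards=True, recompiles=True)

    # Cap how many times a frame may recompile ([0/1], [0/2], [0/3], ...)
    # before Dynamo gives up and runs the frame eagerly.
    dynamo.config.cache_size_limit = 4

Fusing the adapters into the base weights before compiling (for example diffusers' pipe.fuse_lora() followed by pipe.unload_lora_weights()) removes the lora_A/lora_B/scaling guard subtrees seen throughout this dump, since the compiled forward then never touches those attributes.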
source=L['self'].transformer_blocks[6].norm1.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].norm1.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[6].norm1.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | 
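Every lora.Linear in the model produces this same cluster of guards because Dynamo is specializing the PEFT forward path quoted in the guard comments above. What follows is a hedged, minimal sketch of that path, reconstructed from the peft/tuners/lora/layer.py:557-568 lines the log cites; the class scaffold, the single "default_0" adapter, and the Identity dropout are simplifications, and the real PEFT source has additional branches for merged and disabled adapters.

```python
import torch
import torch.nn as nn

class LoraLinearSketch(nn.Module):
    """Rough sketch of peft.tuners.lora.layer.Linear.forward, reconstructed
    from the guard comments above (layer.py:557-568). Simplified: no merge,
    no DoRA, single adapter; attribute names mirror the log."""

    def __init__(self, base_layer: nn.Linear, r: int = 16, scaling: float = 1.0):
        super().__init__()
        self.base_layer = base_layer
        self.lora_A = nn.ModuleDict({"default_0": nn.Linear(base_layer.in_features, r, bias=False)})
        self.lora_B = nn.ModuleDict({"default_0": nn.Linear(r, base_layer.out_features, bias=False)})
        self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})
        self.scaling = {"default_0": scaling}    # guarded above by EQUALS_MATCH == 1.0
        self.use_dora = {"default_0": False}     # guarded by ID_MATCH
        self.merged_adapters = []                # guarded by LENGTH_CHECK (empty)
        self._disable_adapters = False           # guarded by ID_MATCH
        self._active_adapter = ["default_0"]     # one shared list -> TENSOR_ALIASING guard

    def forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)        # layer.py:557
        for active_adapter in self._active_adapter:
            if active_adapter not in self.lora_A.keys():    # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]            # layer.py:562
            lora_B = self.lora_B[active_adapter]            # layer.py:563
            dropout = self.lora_dropout[active_adapter]     # layer.py:564
            scaling = self.scaling[active_adapter]          # layer.py:565
            x = x.to(lora_A.weight.dtype)                   # layer.py:566
            if not self.use_dora[active_adapter]:           # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

layer = LoraLinearSketch(nn.Linear(64, 64))
y = layer(torch.randn(2, 64))
```

Every attribute read in that forward (base_layer, lora_A/lora_B, dropout, scaling, use_dora, merged_adapters, _active_adapter) is exactly what the guard tree pins down, which is why the per-layer guard blocks are so uniform.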
| +- GuardManager entries (no leaf guards) for transformer_blocks[6].norm1._forward_hooks, _backward_hooks, _forward_pre_hooks, _backward_pre_hooks
| +- GuardManager: source=L['self'].transformer_blocks[6].norm2, accessed_by=DictGetItemGuardAccessor(norm2)
| | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm2, 140581767539072), plus ID_MATCH on .training # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
| +- GuardManager: source=L['self'].transformer_blocks[6].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context)
| | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context, 140581767539408) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].ff_context.__dict__), plus ID_MATCH on .training # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net, accessed_by=DictGetItemGuardAccessor(net)
| | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net, 140581767539552) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].ff_context.net, 93831537618768) and LENGTH_CHECK: len(L['self'].transformer_blocks[6].ff_context.net) == 3
| | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[0], 140581767539504), plus DICT_CONTAINS('forward') and .training guards # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
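For readers decoding the guard kinds, the names map onto small predicates that Dynamo evaluates against the frame's locals before reusing a compiled graph. Below is a hand-written approximation of what each kind in this dump means; the real implementations live in torch/_dynamo/guards.py and the C++ guard tree, so these are illustrative equivalents, not the actual APIs.

```python
# Approximate semantics of the guard kinds appearing in this dump.

def id_match(obj, expected_id):            # ID_MATCH / ___check_obj_id
    return id(obj) == expected_id          # exact object identity

def type_match(obj, expected_id):          # TYPE_MATCH / ___check_type_id
    return id(type(obj)) == expected_id    # same class object

def dict_length(d, n):                     # DICT_LENGTH
    return len(d) == n                     # e.g. len(scaling) == 1

def length_check_empty(seq):               # LENGTH_CHECK: not seq
    return not seq                         # e.g. merged_adapters is empty

def dict_not_contains(key, d):             # DICT_CONTAINS: not ___dict_contains(...)
    return key not in d                    # no instance-level 'forward' override

def equals_match(value, expected):         # EQUALS_MATCH
    return value == expected               # e.g. scaling['default_0'] == 1.0
```

ID_MATCH is the strictest: it fails if the module object is replaced by an equal copy, which is why swapping adapters or reloading a model invalidates these graphs even when the weights are identical.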
| | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[0].proj, 140533120377968), plus DICT_CONTAINS('forward') and .training guards # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | +- lora_A and lora_A['default_0']: ID_MATCH and .training guards # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:560/562 in forward
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[0].proj.lora_A['default_0'].weight, 140542616460704) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | +- lora_B and lora_B['default_0']: ID_MATCH and .training guards # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | +- base_layer: ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[0].proj.base_layer, 140581767539600), plus .training guard # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | +- lora_dropout and lora_dropout['default_0']: ID_MATCH and .training guards # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | +- scaling: TYPE_MATCH, DICT_LENGTH == 1, and EQUALS_MATCH: L['self'].transformer_blocks[6].ff_context.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | +- use_dora: TYPE_MATCH, DICT_LENGTH == 1, and ID_MATCH on use_dora['default_0'] # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | +- GuardManager entries (no leaf guards) for proj._forward_hooks, _backward_hooks, _forward_pre_hooks, _backward_pre_hooks
| | | | | +- merged_adapters: TYPE_MATCH and LENGTH_CHECK (empty) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | +- _disable_adapters: ID_MATCH # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | +- _active_adapter: TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].ff_context.net[0].proj._active_adapter (emitted twice in the raw log) # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
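Two guards here are worth singling out. The EQUALS_MATCH on approximate == 'tanh' pins the GELU variant into the graph, and the EQUALS_MATCH on scaling['default_0'] == 1.0 bakes the LoRA scale in as a constant: any call that changes the effective scale (for example passing a different lora_scale) fails the guard and forces another compile. The [0/3] tag on every record marks this as already the fourth compilation of frame 0. A minimal way to watch this mechanism in isolation is sketched below; it assumes a recent PyTorch (2.2+) where torch._logging.set_logs accepts the guards and recompiles artifacts, and relies on Dynamo's usual behavior of specializing plain Python floats to their value.

```python
import torch
import torch._logging

# Surface guard construction and guard-failure reasons, equivalent to
# running with TORCH_LOGS="guards,recompiles".
torch._logging.set_logs(guards=True, recompiles=True)

@torch.compile
def f(x, scale: float):
    # `scale` is a Python float, so Dynamo typically guards it with an
    # EQUALS_MATCH on its value, just like the LoRA scaling dict above.
    return x * scale

x = torch.randn(8)
f(x, 1.0)   # first compile; guard roughly: scale == 1.0
f(x, 0.5)   # guard fails -> recompile, analogous to changing lora_scale
```

The recompiles log then prints which guard failed and on what value, which is usually the fastest way to explain an unexpected [0/N] in dumps like this one.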
| | | | +- GuardManager entries (no leaf guards) for net[0]._forward_hooks, _backward_hooks, _forward_pre_hooks, _backward_pre_hooks
| | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[1], accessed_by=GetItemGuardAccessor(1)
| | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[1], 140581767539696), plus .training guard # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | +- GuardManager: source=L['self'].transformer_blocks[6].ff_context.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[2], 140533120374464), plus DICT_CONTAINS('forward') and .training guards # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | +- lora_A and lora_A['default_0']: ID_MATCH and .training guards # peft/tuners/lora/layer.py:560/562 in forward
| | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[2].lora_A['default_0'].weight, 140542616454544) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | +- lora_B and lora_B['default_0']: ID_MATCH and .training guards # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | +- base_layer: ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].ff_context.net[2].base_layer, 140581767539744), plus .training guard # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | +- lora_dropout and lora_dropout['default_0']: ID_MATCH and .training guards # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | +- scaling: TYPE_MATCH, DICT_LENGTH == 1, and EQUALS_MATCH: L['self'].transformer_blocks[6].ff_context.net[2].scaling['default_0'] == 1.0 # peft/tuners/lora/layer.py:565 in forward
| | | | +- use_dora: TYPE_MATCH, DICT_LENGTH == 1, and ID_MATCH on use_dora['default_0'] # peft/tuners/lora/layer.py:568 in forward
| | | | +- GuardManager entries (no leaf guards) for net[2]._forward_hooks, _backward_hooks, _forward_pre_hooks, _backward_pre_hooks
| | | | +- merged_adapters: TYPE_MATCH and LENGTH_CHECK (empty); _disable_adapters: ID_MATCH # peft/tuners/tuners_utils.py:506/511
| | | | +- _active_adapter: TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].ff_context.net[2]._active_adapter (emitted twice in the raw log) # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
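The TENSOR_ALIASING guards check object identity across modules rather than values: every LoRA layer's _active_adapter aliases one shared list, so Dynamo only needs an `is` check back against transformer_blocks[0].norm1.linear's copy instead of re-validating each list's contents. A toy illustration of the invariant being asserted follows; the Layer class and the block count of 19 are illustrative stand-ins (19 matches the default Flux transformer_blocks depth, an assumption not stated in this log).

```python
# The invariant behind the TENSOR_ALIASING guards: every LoRA layer
# holds the *same* list object naming the active adapters.
shared_active = ["default_0"]

class Layer:
    def __init__(self, active):
        self._active_adapter = active   # no copy: aliasing is intentional

layers = [Layer(shared_active) for _ in range(19)]
assert all(l._active_adapter is layers[0]._active_adapter for l in layers)

# Rebinding one layer's attribute to an equal-but-distinct list would
# pass an == comparison but fail the `is` check, invalidating the graph:
layers[6]._active_adapter = ["default_0"]
assert layers[6]._active_adapter == layers[0]._active_adapter
assert layers[6]._active_adapter is not layers[0]._active_adapter
```

Guarding on aliasing instead of contents keeps the guard count linear in the number of layers while still catching adapter switches, since PEFT swaps adapters by mutating or rebinding that shared state.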
| | +- GuardManager entries (no leaf guards) for ff_context._forward_hooks, _backward_hooks, _forward_pre_hooks, _backward_pre_hooks
| +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context)
| | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context, 140581767537920) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[6].norm1_context.__dict__), plus ID_MATCH on .training # nn/modules/module.py:1556 in _call_impl
| | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.norm, 140581767538112), plus .training guard # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.silu, 140581767538016), plus .training guard # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.linear, 140533120239168), plus DICT_CONTAINS('forward') and .training guards # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | +- lora_A and lora_A['default_0']: ID_MATCH and .training guards # peft/tuners/lora/layer.py:560/562 in forward
| | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.linear.lora_A['default_0'].weight, 140537668328064) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | +- lora_B and lora_B['default_0']: ID_MATCH and .training guards # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.linear.base_layer, 140581767538064) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.linear.lora_dropout, 140533120246368) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.linear.lora_dropout['default_0'], 140533120246224) # 
dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].norm1_context.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].norm1_context.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[6].norm1_context.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].norm1_context.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[6].norm1_context.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[6].norm1_context.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[6].norm1_context.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm1_context.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[6].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].transformer_blocks[6].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm2_context, 140581767539120) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[6].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[6]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[7], accessed_by=GetItemGuardAccessor(7) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7], 140581767537392) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff, accessed_by=DictGetItemGuardAccessor(ff) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff, 140581767541040) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net, 140581767541280) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[7].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0], 140581767541232) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0]._modules, 
accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].proj, 140533119597152) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].ff.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].proj.lora_A, 140533119597488) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[7].ff.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].proj.lora_A['default_0'], 140533119591344) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].proj.lora_A['default_0'].weight, 140563241214624) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].proj.lora_B, 140533119597296) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # 
peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].proj.lora_B['default_0'], 140533119597104) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].proj.base_layer, 140581767541328) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].proj.lora_dropout, 140533119594608) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].proj.lora_dropout['default_0'], 140533119597536) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].ff.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].ff.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- 
EQUALS_MATCH: L['self'].transformer_blocks[7].ff.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].ff.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].ff.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].ff.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[7].ff.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in 
disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[1], 140581767541376) # for module in self.net: # 
diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[2], 140533119594512) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].ff.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[2].lora_A, 140533119584144) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | 
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[2].lora_A['default_0'], 140533119585296) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[2].lora_A['default_0'].weight, 140563241211424) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[2].lora_B, 140533119596336) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[2].lora_B['default_0'], 140533119594416) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[2].base_layer, 140581767541424) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[2].lora_dropout, 140533119593216) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[2].lora_dropout['default_0'], 140533119583184) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].ff.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].ff.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].ff.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].ff.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].ff.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].ff.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[7].ff.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn, accessed_by=DictGetItemGuardAccessor(attn)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn, 140581767540272) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
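The block above is one instance of a pattern that repeats for every PEFT-wrapped Linear in the model: Dynamo traces the LoRA layer's forward, so each attribute it reads (lora_A, lora_B, lora_dropout, scaling, use_dora, merged_adapters, _disable_adapters, _active_adapter) becomes its own guard. The sketch below reconstructs that forward from the source lines the guards quote (peft/tuners/lora/layer.py:557-568); it is a simplified reconstruction, not the verbatim peft implementation, and it omits the merged-weight, mixed-batch, and DoRA paths.

    # Simplified sketch of the peft LoRA Linear forward that these guards specialize.
    # Reconstructed from the source lines quoted in the guard comments above;
    # the real implementation handles more cases (merged weights, DoRA, dtype casting back).
    def lora_linear_forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)        # layer.py:557
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():    # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]            # layer.py:562
            lora_B = self.lora_B[active_adapter]            # layer.py:563
            dropout = self.lora_dropout[active_adapter]     # layer.py:564
            scaling = self.scaling[active_adapter]          # layer.py:565
            x = x.to(lora_A.weight.dtype)                   # layer.py:566
            if not self.use_dora[active_adapter]:           # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Every dictionary lookup and attribute access in this path is pinned by an ID_MATCH, TYPE_MATCH, or EQUALS_MATCH guard, which is why the subtree for a single wrapped Linear runs to roughly forty guard lines.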
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_k, 140533118880304) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_k.lora_A, 140533121763024) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_k.lora_A['default_0'], 140533121765424) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_k.lora_A['default_0'].weight, 140542613136512) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_k.lora_B, 140533121762400) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_k.lora_B['default_0'], 140533121763840) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_k.base_layer, 140581767540416) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_k.lora_dropout, 140533118892160) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_k.lora_dropout['default_0'], 140533118887936) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[7].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
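Note the mix of guard kinds protecting the adapter state: TYPE_MATCH plus DICT_LENGTH pin scaling and use_dora as one-entry dicts, EQUALS_MATCH pins the scale value itself (1.0), LENGTH_CHECK pins merged_adapters as empty, and ID_MATCH pins booleans by object identity. Roughly, the compiled graph stays valid only while a predicate like the following holds (illustrative Python only; `pipe` is a hypothetical handle to the pipeline, and the real checks run as fused C++ accessors, not attribute reads):

    # Illustrative only: approximately what the guards above re-evaluate
    # before reusing this compiled graph for the to_k projection.
    lin = pipe.transformer.transformer_blocks[7].attn.to_k  # hypothetical handle
    guards_hold = (
        type(lin.scaling) is dict and len(lin.scaling) == 1  # TYPE_MATCH + DICT_LENGTH
        and lin.scaling["default_0"] == 1.0                  # EQUALS_MATCH
        and lin.use_dora["default_0"] is False               # ID_MATCH on the False singleton
        and not lin.merged_adapters                          # LENGTH_CHECK
        and lin._disable_adapters is False                   # ID_MATCH on the False singleton
    )

So rescaling the LoRA (a different effective scale), merging an adapter into the base weights, or registering a second adapter changes one of these values and forces a recompile of the frame.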
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_q, 140533118879152) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_q.lora_A, 140533118883664) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_q.lora_A['default_0'], 140533118878480) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_q.lora_A['default_0'].weight, 140542613139872) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_q.lora_B, 140533118890096) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_q.lora_B['default_0'], 140533118885968) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_q.base_layer, 140581767540512) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_q.lora_dropout, 140533118887696) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_q.lora_dropout['default_0'], 140533118885824) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[7].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_v, 140533121767536) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
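The to_q, to_k, and to_v subtrees repeat the same adapter guard block because the traced attention processor routes each projection through the LoRA wrapper. The cited lines (diffusers/src/diffusers/models/attention_processor.py:1716-1718) are, as quoted in the guard comments:

    # Context for the to_q/to_k/to_v guards, taken from the quoted source lines:
    query = attn.to_q(hidden_states)   # attention_processor.py:1716
    key = attn.to_k(hidden_states)     # attention_processor.py:1717
    value = attn.to_v(hidden_states)   # attention_processor.py:1718

Each of these calls re-enters the LoRA forward sketched earlier, so every projection contributes its own full lora_A/lora_B/lora_dropout/scaling/use_dora guard subtree.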
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_v.lora_A, 140533121772768) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_v.lora_A['default_0'], 140533121766576) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_v.lora_A['default_0'].weight, 140537665312528) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_v.lora_B, 140533121770704) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_v.lora_B['default_0'], 140533121762688) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_v.base_layer, 140581767540608) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_v.lora_dropout, 140533121764512) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_v.lora_dropout['default_0'], 140533121765856) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[7].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.norm_k, 140581767540464) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.norm_k.weight, 140581785355344) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.norm_q, 140581767540368) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[7].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.norm_q.weight, 140581773232464) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out, 
accessed_by=DictGetItemGuardAccessor(to_out) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out, 140581767540800) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[0], 140533118898416) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].attn.to_out[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[0].training, 140591004393408) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[0].lora_A, 
140533118900192) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[0].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[0].lora_A['default_0'], 140533118905904) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[0].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[0].lora_A['default_0'].weight, 140563241205904) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | 
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[0].lora_B, 140533118903552) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[0].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[0].lora_B['default_0'], 140533118908448) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[0].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[0].base_layer, 140581767540848) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[0].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[0].lora_dropout, 140533118904992) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[0].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[0].lora_dropout['default_0'], 140533118901296) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[0].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: 
___check_type_id(L['self'].transformer_blocks[7].attn.to_out[0].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].attn.to_out[0].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].attn.to_out[0].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.to_out[0].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].attn.to_out[0].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[0].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.to_out[0].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[7].attn.to_out[0].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[0]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[0]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[1], accessed_by=GetItemGuardAccessor(1) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[1], 140581767540896) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # 
diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_k_proj, 140533118865600) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].attn.add_k_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_k_proj.training, 140591004393408) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_k_proj.lora_A, 140533118865648) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_k_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | 
+- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_k_proj.lora_A['default_0'], 140533118871408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_k_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_k_proj.lora_A['default_0'].weight, 140537665309888) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_k_proj.lora_B, 140533118871504) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_k_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_k_proj.lora_B['default_0'], 140533118870736) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_k_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_k_proj.base_layer, 140581767540656) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_k_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_k_proj.lora_dropout, 140533118877408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.lora_dropout.__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_k_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_k_proj.lora_dropout['default_0'], 140533118875488) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_k_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.add_k_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].attn.add_k_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].attn.add_k_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | 
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.add_k_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].attn.add_k_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_k_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.add_k_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[7].attn.add_k_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_k_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] 
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_k_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_q_proj, 140533119135280) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].attn.add_q_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_q_proj.training, 140591004393408) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[7].attn.add_q_proj.lora_A, 140533119125152) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_q_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_q_proj.lora_A['default_0'], 140533118906336) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_q_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_q_proj.lora_A['default_0'].weight, 140537665314528) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_q_proj.lora_B, 140533119126976) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_q_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_q_proj.lora_B['default_0'], 140533118908832) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_q_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_q_proj.base_layer, 140581767540752) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) 
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_q_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_q_proj.lora_dropout, 140533119126544) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_q_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_q_proj.lora_dropout['default_0'], 140533119133360) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_q_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: 
___check_type_id(L['self'].transformer_blocks[7].attn.add_q_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].attn.add_q_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].attn.add_q_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.add_q_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].attn.add_q_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_q_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.add_q_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | 
| | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[7].attn.add_q_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_q_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_q_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_v_proj, 140533120516592) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].attn.add_v_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[7].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_v_proj.training, 140591004393408) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_v_proj.lora_A, 140533119123712) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_v_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_v_proj.lora_A['default_0'], 140533119130288) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_v_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_v_proj.lora_A['default_0'].weight, 140537665310608) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_v_proj.lora_B, 140533119139648) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_v_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_v_proj.lora_B['default_0'], 140533119134512) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_v_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # 
peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_v_proj.base_layer, 140581767540704) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_v_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_v_proj.lora_dropout, 140533119134800) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_v_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_v_proj.lora_dropout['default_0'], 140533119134224) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[7].attn.add_v_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_v_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.add_v_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].attn.add_v_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].attn.add_v_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.add_v_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].attn.add_v_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_v_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.add_v_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[7].attn.add_v_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.add_v_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.add_v_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out, 
accessed_by=DictGetItemGuardAccessor(to_add_out) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_add_out, 140533118901392) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].attn.to_add_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_add_out.training, 140591004393408) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_add_out.lora_A, 140533118908304) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_add_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[7].attn.to_add_out.lora_A['default_0'], 140533119596960) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_add_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_add_out.lora_A['default_0'].weight, 140563241220224) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_add_out.lora_B, 140533119596480) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_add_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_add_out.lora_B['default_0'], 140533119595760) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_add_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_add_out.base_layer, 140581767540944) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_add_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_add_out.lora_dropout, 140533118909648) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.lora_dropout.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_add_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_add_out.lora_dropout['default_0'], 140533118905712) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_add_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.to_add_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].attn.to_add_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].attn.to_add_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | 
| +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.to_add_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].attn.to_add_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_add_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.to_add_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[7].attn.to_add_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.to_add_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.to_add_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.norm_added_k, 140581767541088) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[7].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.norm_added_k.weight, 140581765826096) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.norm_added_q, 140581767540992) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: 
L['self'].transformer_blocks[7].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.norm_added_q.weight, 140581772706816) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].attn.processor, 140581767540224) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: 
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1, accessed_by=DictGetItemGuardAccessor(norm1)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1, 140581767539792) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.norm, 140581767539936) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.silu, 140581767539840) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.linear, 140533120480304) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].norm1.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
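
The norm1 subtree is an AdaLayerNormZero-style block: the guards walk its silu -> linear -> norm chain. A sketch assembled from the lines quoted in the guards (normalization.py:135, 137, 139); the 6-way chunk is an assumption consistent with the five extra outputs seen at transformer_flux.py:165.

    def ada_layer_norm_zero_forward(self, x, emb):
        # self.emb is guarded to a fixed object (None here, per normalization.py:135),
        # so the embedding branch is compiled out.
        emb = self.linear(self.silu(emb))  # quoted at normalization.py:137
        # assumed: split into shift/scale/gate for attention and MLP
        shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = emb.chunk(6, dim=1)
        x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # quoted at normalization.py:139
        return x, gate_msa, shift_mlp, scale_mlp, gate_mlp

Note that norm1.linear is itself a LoRA-wrapped module (its ID_MATCH target differs from the plain diffusers modules), which is why the guards below descend into lora_A / lora_B.
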
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.linear.lora_A, 140533120480400) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.linear.lora_A['default_0'], 140533120468304) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.linear.lora_A['default_0'].weight, 140542616460864) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.linear.lora_B, 140533120481456) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.linear.lora_B['default_0'], 140533120477472) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
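
The lora_A / lora_B / lora_dropout / scaling guards in this subtree all point into one PEFT forward. A sketch of that Linear forward, stitched together from the source lines the guards quote (peft/tuners/lora/layer.py:557-568); the loop over self.active_adapters and the final accumulation line are assumptions, every other line appears verbatim in the guard comments.

    def lora_linear_forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)        # layer.py:557
        for active_adapter in self.active_adapters:         # assumed iteration
            if active_adapter not in self.lora_A.keys():    # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]            # layer.py:562
            lora_B = self.lora_B[active_adapter]            # layer.py:563
            dropout = self.lora_dropout[active_adapter]     # layer.py:564
            scaling = self.scaling[active_adapter]          # layer.py:565
            x = x.to(lora_A.weight.dtype)                   # layer.py:566
            if not self.use_dora[active_adapter]:           # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling  # assumed accumulation
        return result

Every attribute read on this path (the ModuleDicts, the 'default_0' entries, their weights and training flags) gets its own guard node, which is why a single LoRA-wrapped Linear contributes a dozen-plus guards per transformer block.
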
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.linear.base_layer, 140581767539888) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.linear.lora_dropout, 140533120482032) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.linear.lora_dropout['default_0'], 140533120476512) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].norm1.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].norm1.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].norm1.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].norm1.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].norm1.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
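
The scaling guard is an EQUALS_MATCH on a plain Python float, so the compiled graph is specialized to a LoRA scale of exactly 1.0; mutating that dict entry later (for example via an adapter-weight API) fails the guard and forces a recompile. A self-contained toy reproducing the same specialization; the names here are illustrative, not from the log:

    import torch

    scaling = {"default_0": 1.0}

    @torch.compile
    def scaled(x):
        return x * scaling["default_0"]   # Dynamo installs an EQUALS_MATCH guard on the value

    x = torch.ones(4)
    scaled(x)                   # first call: compiles, records scaling['default_0'] == 1.0
    scaling["default_0"] = 0.5  # guard now fails
    scaled(x)                   # guard miss -> recompilation with the new constant
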
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].norm1.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[7].norm1.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm2, accessed_by=DictGetItemGuardAccessor(norm2)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm2, 140581767541136) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm2.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context, 140581767541472) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
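
For orientation, rough Python equivalents of the guard kinds seen in this subtree. The real checks run in C++ under the RootGuardManager tree, and the cached ids are the integers printed above, so this is only an illustrative sketch, not the actual generated guard code:

    def guards_hold(L, cached):
        m = L['self'].transformer_blocks[7].norm1.linear
        return (
            id(m) == cached['linear_id']            # ID_MATCH: same object identity
            and type(m.scaling) is dict             # TYPE_MATCH (the container type; dict assumed here)
            and len(m.scaling) == 1                 # DICT_LENGTH
            and m.scaling['default_0'] == 1.0       # EQUALS_MATCH, by value
            and not m.merged_adapters               # LENGTH_CHECK: list still empty
            and 'forward' not in m.__dict__         # DICT_CONTAINS: no per-instance forward override
        )

TENSOR_ALIASING is the inter-object variant: it asserts that transformer_blocks[0] and transformer_blocks[7] still share the very same _active_adapter object, as they did at trace time.
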
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net, 140581767541616) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[7].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0], 140581767541568) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].proj, 140533119591152) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].ff_context.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_A, 140533119595520) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_A['default_0'], 140533119597776) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_A['default_0'].weight, 140563241210384) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_B, 140533119592784) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_B['default_0'], 140533119589424) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].proj.base_layer, 140581767541664) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_dropout, 140533119597968) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_dropout['default_0'], 140533119585056) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].ff_context.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].ff_context.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].ff_context.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].ff_context.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].ff_context.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].ff_context.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[7].ff_context.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[1], 140581767541760) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[2], 140533119582608) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].ff_context.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
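
The ff_context guards describe a 3-element feed-forward: net[0] is a GELU block whose proj is a LoRA-wrapped Linear (activations.py:88, with approximate == 'tanh' pinned at activations.py:83), net[2] is a second LoRA-wrapped Linear, and net[1] carries only a training-flag guard (a Dropout is a plausible assumption, since nothing else on it is read). A sketch of the loop quoted at attention.py:1200:

    import torch.nn.functional as F

    def gelu_block_forward(self, hidden_states):
        hidden_states = self.proj(hidden_states)                    # activations.py:88 (LoRA-wrapped Linear)
        return F.gelu(hidden_states, approximate=self.approximate)  # activations.py:83; guarded to 'tanh'

    def feed_forward(self, hidden_states):
        for module in self.net:      # attention.py:1200; len(self.net) == 3 is guarded by LENGTH_CHECK
            hidden_states = module(hidden_states)
        return hidden_states
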
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[2].lora_A, 140533119448784) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[2].lora_A['default_0'], 140533119441920) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[2].lora_A['default_0'].weight, 140542605751328) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[2].lora_B, 140533119438416) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[2].lora_B['default_0'], 140533119438368) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[2].base_layer, 140581767541808) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[2].lora_dropout, 140533119584576) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[2].lora_dropout['default_0'], 140533119597584) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager:
source=L['self'].transformer_blocks[7].ff_context.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].ff_context.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].ff_context.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].ff_context.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].ff_context.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].ff_context.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].ff_context.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[7].ff_context.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].ff_context.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[7].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context, 140581767539984) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[7].norm1_context.norm, 140581767540176) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.silu, 140581767540080) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.linear, 140533120481216) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[7].norm1_context.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.linear.lora_A, 140533119387632) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.linear.lora_A['default_0'], 140533118886016) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.linear.lora_A['default_0'].training, 140591004393408) # lora_A = 
self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.linear.lora_A['default_0'].weight, 140542613131632) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.linear.lora_B, 140533119400496) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.linear.lora_B['default_0'], 140533118891392) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.linear.base_layer, 140581767540128) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.linear.lora_dropout, 140533120481264) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.linear.lora_dropout['default_0'], 140533120481984) # 
dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].norm1_context.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].norm1_context.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[7].norm1_context.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].norm1_context.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[7].norm1_context.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[7].norm1_context.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[7].norm1_context.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm1_context.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[7].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].transformer_blocks[7].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm2_context, 140581767541184) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[7].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[7]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[8], accessed_by=GetItemGuardAccessor(8) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8], 140581767539456) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff, accessed_by=DictGetItemGuardAccessor(ff) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff, 140581767543104) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net, 140581767543344) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[8].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0], 140581767543296) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0]._modules, 
accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].proj, 140533119033904) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].ff.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].proj.lora_A, 140533119036544) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[8].ff.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].proj.lora_A['default_0'], 140533119027616) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].proj.lora_A['default_0'].weight, 140542619920768) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].proj.lora_B, 140533119031456) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # 
peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].proj.lora_B['default_0'], 140533119039568) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].proj.base_layer, 140581767543392) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].proj.lora_dropout, 140533119028864) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].proj.lora_dropout['default_0'], 140533119040384) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].ff.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].ff.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- 
EQUALS_MATCH: L['self'].transformer_blocks[8].ff.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].ff.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].ff.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].ff.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[8].ff.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in 
disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[1], 140581767543440) # for module in self.net: # 
diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[2], 140533119040720) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].ff.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[2].lora_A, 140533119147584) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | 
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[2].lora_A['default_0'], 140533119143648) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[2].lora_A['default_0'].weight, 140542619925808) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[2].lora_B, 140533119151952) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[2].lora_B['default_0'], 140533119148832) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[2].base_layer, 140581767543488) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[2].lora_dropout, 140533119155744) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[2].lora_dropout['default_0'], 140533119036736) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].ff.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].ff.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].scaling['default_0'], 
accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].ff.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].ff.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].ff.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].ff.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[8].ff.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # 
peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn, 140581767542336) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_k, 140533119327040) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_k.lora_A, 140533119326464) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[8].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_k.lora_A['default_0'], 140533119325984) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_k.lora_A['default_0'].weight, 140537665890608) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_k.lora_B, 140533119327328) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_k.lora_B['default_0'], 140533119326800) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_k.base_layer, 140581767542480) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[8].attn.to_k.lora_dropout, 140533119333712) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_k.lora_dropout['default_0'], 140533119325840) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[8].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | 
| | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_q, 140533119328240) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- 
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_q.lora_A, 140533119330208) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_q.lora_A['default_0'], 140533119326368) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_q.lora_A['default_0'].weight, 140537665888608) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_q.lora_B, 140533119328048) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_q.lora_B['default_0'], 140533119328432) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_q.base_layer, 140581767542576) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_q.lora_dropout, 140533119327472) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_q.lora_dropout['default_0'], 140533119330304) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[8].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
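
The subtree above closes the guards for transformer_blocks[8].attn.to_q. Every LoRA-wrapped projection produces the same shape of subtree because Dynamo traces straight through the PEFT LoRA Linear.forward and pins each attribute that trace touched. The sketch below reconstructs that forward from the source lines quoted in the guard comments (peft/tuners/lora/layer.py:557-568); the class scaffolding around it is assumed for illustration and is not the actual peft implementation.

import torch
import torch.nn as nn

class LoraLinearSketch(nn.Module):
    """Minimal sketch of the peft LoRA Linear these guards trace."""
    def __init__(self, base_layer: nn.Linear, r: int = 16, adapter: str = "default_0"):
        super().__init__()
        self.base_layer = base_layer                      # guarded via ID_MATCH
        self.lora_A = nn.ModuleDict({adapter: nn.Linear(base_layer.in_features, r, bias=False)})
        self.lora_B = nn.ModuleDict({adapter: nn.Linear(r, base_layer.out_features, bias=False)})
        self.lora_dropout = nn.ModuleDict({adapter: nn.Identity()})
        self.scaling = {adapter: 1.0}                     # guarded via EQUALS_MATCH == 1.0
        self.use_dora = {adapter: False}                  # guarded via ID_MATCH on False
        self.active_adapters = [adapter]

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        result = self.base_layer(x)                       # layer.py:557
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():  # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]          # layer.py:562
            lora_B = self.lora_B[active_adapter]          # layer.py:563
            dropout = self.lora_dropout[active_adapter]   # layer.py:564
            scaling = self.scaling[active_adapter]        # layer.py:565
            x = x.to(lora_A.weight.dtype)                 # layer.py:566
            if not self.use_dora[active_adapter]:         # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Each attribute read in this forward (base_layer, lora_A, lora_B, lora_dropout, scaling, use_dora) appears above as its own GuardManager node, which is why the guard tree grows with every LoRA layer.
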
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_v, 140533119325600) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_v.lora_A, 140533119329152) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_v.lora_A['default_0'], 140533119772528) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_v.lora_A['default_0'].weight, 140537665885728) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_v.lora_B, 140533119324928) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_v.lora_B['default_0'], 140533119773872) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_v.base_layer, 140581767542672) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_v.lora_dropout, 140533119326656) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_v.lora_dropout['default_0'], 140533119322336) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[8].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
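
to_v closes with the same TENSOR_ALIASING check as to_q: the _active_adapter of every LoRA layer must still be the very object shared with transformer_blocks[0].norm1.linear. In rough terms, and assuming the private torch._dynamo helpers behave as their names suggest (they are internal, not public API), the two most common predicates above reduce to:

def id_match(obj, expected_id: int) -> bool:
    # ___check_obj_id: fails (forcing a recompile) if the attribute
    # now refers to a different Python object than at compile time.
    return id(obj) == expected_id

def tensor_aliasing(a, b) -> bool:
    # TENSOR_ALIASING: both guarded sources must be the same object,
    # e.g. the _active_adapter list shared by all LoRA layers.
    return a is b
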
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.norm_k, 140581767542528) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.norm_k.weight, 140581774129120) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.norm_q, 140581767542432) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.norm_q.weight, 140581772718176) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
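
The norm_q and norm_k subtrees are much smaller than the LoRA ones: beyond identity and training-flag checks, they only pin eps (EQUALS_MATCH == 1e-06) and the optional weight, matching the two quoted lines of the diffusers RMSNorm forward (normalization.py:428 and :430). The sketch below fills in the surrounding forward under the standard RMSNorm assumption that variance is a mean of squares over the last dimension; dtype handling is omitted, so it is illustrative rather than the exact diffusers code.

import torch
import torch.nn as nn

class RMSNormSketch(nn.Module):
    def __init__(self, dim: int, eps: float = 1e-6, elementwise_affine: bool = True):
        super().__init__()
        self.eps = eps  # guarded above via EQUALS_MATCH: eps == 1e-06
        self.weight = nn.Parameter(torch.ones(dim)) if elementwise_affine else None

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        # assumed: mean of squares over the last dim, as in standard RMSNorm
        variance = hidden_states.pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + self.eps)  # normalization.py:428
        if self.weight is not None:                                       # normalization.py:430
            hidden_states = hidden_states * self.weight
        return hidden_states
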
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out, 140581767542864) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[0], 140533119036880) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].attn.to_out[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[0].training, 140591004393408) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[0].lora_A, 140533119039184) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[0].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[0].lora_A['default_0'], 140533119035632) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[0].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[0].lora_A['default_0'].weight, 140542607165456) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[0].lora_B, 140533119037840) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[0].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[0].lora_B['default_0'], 140533119038416) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[0].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[0].base_layer, 140581767542912) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[0].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[0].lora_dropout, 140533119038752) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[0].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[0].lora_dropout['default_0'], 140533119039280) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[0].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.to_out[0].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].attn.to_out[0].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].attn.to_out[0].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.to_out[0].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].attn.to_out[0].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[0].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.to_out[0].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[8].attn.to_out[0].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[0]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[0]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[1], 140581767542960) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
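
to_out[0] repeats the full LoRA subtree once more, while to_out[1] (the dropout slot of the output projection) only needs identity and training-flag checks. To reproduce a guard dump like this section, recent PyTorch exposes the guards artifact through torch._logging; the TORCH_LOGS="guards" environment variable is the equivalent switch. The exact flags below are an assumption to verify against your torch version:

import torch
import torch._logging as torch_logging

# Print the guard tree (and recompile reasons) for every compiled frame;
# assumed available in PyTorch 2.1+.
torch_logging.set_logs(guards=True, recompiles=True)

@torch.compile
def f(x):
    return torch.relu(x) * 2

f(torch.randn(8))  # the TREE_GUARD_MANAGER dump is emitted after compilation
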
diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj, 140533118877504) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].attn.add_k_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj.training, 140591004393408) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj.lora_A, 140533118941072) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | 
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj, 140533118877504) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].attn.add_k_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj.training, 140591004393408) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj.lora_A, 140533118941072) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj.lora_A['default_0'], 140533118930848) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj.lora_A['default_0'].weight, 140537665884128) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj.lora_B, 140533118932720) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj.lora_B['default_0'], 140533118930992) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj.base_layer, 140581767542720) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj.lora_dropout, 140533118935024) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj.lora_dropout['default_0'], 140533118927632) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.add_k_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].attn.add_k_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].attn.add_k_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.add_k_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].attn.add_k_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.add_k_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[8].attn.add_k_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_k_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_k_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
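A note on the ID_MATCH entries above: ___check_obj_id guards on object identity, not value. Every boolean attribute in this tree (.training, _disable_adapters, use_dora['default_0']) resolves to one of the same two ids, 140591004393408 and 140591004393440, which is consistent with them being the process-wide CPython singletons True and False; the log prints only ids, never values, so which id is which is an inference. A sketch of what such a guard reduces to (the ids printed below come from whatever process runs this, not from the log):

    # Illustrative reduction of ___check_obj_id: store id(obj) at compile
    # time, compare ids at guard-evaluation time. Stable for bools because
    # CPython interns True and False as singletons.
    def check_obj_id(obj, expected_id):
        return id(obj) == expected_id

    guard_id = id(False)                   # captured when the graph was compiled
    print(check_obj_id(False, guard_id))   # True  -> guard passes
    print(check_obj_id(True, guard_id))    # False -> guard fails, recompile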
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_q_proj, 140533118939296) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].attn.add_q_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_q_proj.training, 140591004393408) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_q_proj.lora_A, 140533118934400) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_q_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_q_proj.lora_A['default_0'], 140533118939440) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_q_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_q_proj.lora_A['default_0'].weight, 140542607173056) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_q_proj.lora_B, 140533118931136) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_q_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_q_proj.lora_B['default_0'], 140533118935504) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_q_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_q_proj.base_layer, 140581767542816) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_q_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_q_proj.lora_dropout, 140533118939344) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_q_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_q_proj.lora_dropout['default_0'], 140533118939584) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_q_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.add_q_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].attn.add_q_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].attn.add_q_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.add_q_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].attn.add_q_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_q_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.add_q_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[8].attn.add_q_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_q_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_q_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
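The TENSOR_ALIASING pairs above (each printed twice in this log) do not pin a value at all: they assert that every layer's _active_adapter is the very same object as transformer_blocks[0].norm1.linear._active_adapter, i.e. one shared adapter selection across all LoRA layers, so a single identity check covers the whole model. In plain Python (illustrative stand-in classes, not peft code):

    class Layer:
        pass

    shared = ["default_0"]          # one adapter selection for the whole model
    norm_linear, to_out0 = Layer(), Layer()
    norm_linear._active_adapter = shared
    to_out0._active_adapter = shared
    # This identity relation is what the TENSOR_ALIASING guard checks:
    assert norm_linear._active_adapter is to_out0._active_adapter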
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_v_proj, 140533118928256) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].attn.add_v_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_v_proj.training, 140591004393408) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_v_proj.lora_A, 140533118931664) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_v_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_v_proj.lora_A['default_0'], 140533118939968) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_v_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_v_proj.lora_A['default_0'].weight, 140542607169056) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_v_proj.lora_B, 140533118932576) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_v_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_v_proj.lora_B['default_0'], 140533118930560) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_v_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_v_proj.base_layer, 140581767542768) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_v_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_v_proj.lora_dropout, 140533118931712) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_v_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_v_proj.lora_dropout['default_0'], 140533118929312) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_v_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.add_v_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].attn.add_v_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].attn.add_v_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.add_v_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].attn.add_v_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_v_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.add_v_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[8].attn.add_v_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.add_v_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.add_v_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
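By this point the same guard block has repeated for to_out, add_k_proj, add_q_proj and add_v_proj, and it recurs for every adapted projection in every transformer block, so guard-evaluation cost grows roughly linearly with the number of LoRA-wrapped modules. When the adapter is fixed at runtime, one common mitigation is to fold the LoRA weights into the base layers before compiling, which removes the lora_A/lora_B/scaling attribute reads these guards protect. The sketch below uses the standard diffusers/peft entry points (FluxPipeline matches the transformer_flux frames in these guards; the checkpoint and LoRA ids are placeholders, and exact API availability depends on installed versions):

    import torch
    from diffusers import FluxPipeline

    pipe = FluxPipeline.from_pretrained(
        "black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16
    ).to("cuda")
    pipe.load_lora_weights("your/lora-repo-or-path")  # placeholder id
    pipe.fuse_lora()             # fold lora_B @ lora_A * scaling into base weights
    pipe.unload_lora_weights()   # drop the peft wrapper modules these guards watch
    pipe.transformer = torch.compile(pipe.transformer)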
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_add_out, 140533119038512) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].attn.to_add_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_add_out.training, 140591004393408) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_add_out.lora_A, 140533119037024) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_add_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_add_out.lora_A['default_0'], 140533119040672) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_add_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_add_out.lora_A['default_0'].weight, 140542619916128) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_add_out.lora_B, 140533119038848) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_add_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_add_out.lora_B['default_0'], 140533119037120) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_add_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_add_out.base_layer, 140581767543008) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_add_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_add_out.lora_dropout, 140533119034240) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_add_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_add_out.lora_dropout['default_0'], 140533119038656) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_add_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.to_add_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].attn.to_add_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].attn.to_add_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | |
| +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.to_add_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].attn.to_add_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_add_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.to_add_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[8].attn.to_add_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.to_add_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.to_add_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.norm_added_k, 140581767543152) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager:
source=L['self'].transformer_blocks[8].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.norm_added_k.weight, 140581772720576) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.norm_added_q, 140581767543056) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: 
L['self'].transformer_blocks[8].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.norm_added_q.weight, 140581773245744) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].attn.processor, 140581767542288) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[8].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1, accessed_by=DictGetItemGuardAccessor(norm1) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1, 140581767541856) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.norm, 140581767542000) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.silu, 140581767541904) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.linear, 140533119448592) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].norm1.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.linear.lora_A, 140533119442496) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.linear.lora_A['default_0'], 140533119443600) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.linear.lora_A['default_0'].weight, 140542605747088) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.linear.lora_B, 140533119448448) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.linear.lora_B['default_0'], 140533119444896) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.linear.base_layer, 140581767541952) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.linear.lora_dropout, 140533119443984) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.linear.lora_dropout['default_0'], 140533119443120) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.lora_dropout['default_0'].__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].norm1.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].norm1.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].norm1.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].norm1.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].norm1.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[8].norm1.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].norm1.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[8].norm1.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | |
+- GuardManager: source=L['self'].transformer_blocks[8].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm2, accessed_by=DictGetItemGuardAccessor(norm2) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm2, 140581767543200) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm2.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context, 140581767543536) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # 
diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net, 140581767543680) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[8].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0], 140581767543632) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].proj, 140533119153536) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].ff_context.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_A, 140533119148208) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_A['default_0'], 140533119719680) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_A['default_0'].weight, 140542618893856) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_B, 140533119146720) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_B['default_0'], 140533119725872) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].proj.base_layer, 140581767543728) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_dropout, 140533119151136) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_dropout['default_0'], 140533119150176) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
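Everything guarded above corresponds to the LoRA forward path in peft/tuners/lora/layer.py (lines 557-568 of this build). A condensed sketch of that path, assembled from the source lines the guards cite (not the verbatim peft code; multi-adapter bookkeeping and dtype restoration are omitted):

    # Sketch of peft's lora.Linear.forward, reconstructed from the lines the guards cite.
    def forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)        # layer.py:557
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():    # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]            # layer.py:562
            lora_B = self.lora_B[active_adapter]            # layer.py:563
            dropout = self.lora_dropout[active_adapter]     # layer.py:564
            scaling = self.scaling[active_adapter]          # layer.py:565
            x = x.to(lora_A.weight.dtype)                   # layer.py:566
            if not self.use_dora[active_adapter]:           # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Each attribute this path touches (lora_A, lora_B, lora_dropout, scaling, use_dora, plus merged_adapters, _disable_adapters and _active_adapter from tuners_utils.py) gets its own guard on every LoRA-wrapped linear, which is why the same block of checks repeats below for each wrapped layer.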
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].ff_context.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].ff_context.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].ff_context.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].ff_context.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].ff_context.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].ff_context.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[8].ff_context.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
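The EQUALS_MATCH on approximate == 'tanh' comes from the diffusers GELU module that wraps net[0].proj; per the cited activations.py:83, the call is approximately:

    # Sketch of diffusers' GELU.gelu around the cited line (activations.py:83); simplified.
    import torch.nn.functional as F

    def gelu(self, gate):
        return F.gelu(gate, approximate=self.approximate)  # 'tanh' in this trace

Dynamo specializes on the string value, so the compiled graph bakes in the tanh-approximate GELU; a block constructed with a different approximate value would fail this guard and trigger a recompile.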
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[1], 140581767543824) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[2], 140533119723616) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].ff_context.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[2].lora_A, 140533119719968) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[2].lora_A['default_0'], 140533119270336) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[2].lora_A['default_0'].weight, 140542618894176) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[2].lora_B, 140533119716560) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[2].lora_B['default_0'], 140533119268896) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
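The net[0]/net[1]/net[2] guards map onto diffusers' FeedForward, whose forward simply iterates its net ModuleList (the attention.py:1200 line the guards cite). A minimal sketch, assuming the standard FeedForward layout in which net[1] is a Dropout (consistent with it carrying only generic module guards here):

    # Sketch of diffusers FeedForward.forward around the cited line (attention.py:1200).
    def forward(self, hidden_states):
        # net[0]: GELU(proj) activation, net[1]: Dropout (assumed), net[2]: output Linear
        for module in self.net:
            hidden_states = module(hidden_states)
        return hidden_states

With the adapter loaded, net[0].proj and net[2] are peft lora.Linear wrappers, so each carries the full adapter guard block, while net[1] is guarded only through its training flag.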
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[2].base_layer, 140581767543872) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[2].lora_dropout, 140533119714208) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[2].lora_dropout['default_0'], 140533119727504) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].ff_context.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].ff_context.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].ff_context.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].ff_context.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].ff_context.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].ff_context.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[8].ff_context.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].ff_context.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
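For reference, a dump in this format can be reproduced with PyTorch 2.x logging controls; the spelling below is the standard TORCH_LOGS mechanism, not anything specific to this run:

    # Enable verbose guard logging before compiling (PyTorch 2.x):
    #   TORCH_LOGS="guards" python your_script.py
    # or programmatically:
    import torch._logging
    torch._logging.set_logs(guards=True)  # emits [__guards] records like the ones above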
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context, 140581767542048) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.norm, 140581767542240) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.silu, 140581767542144) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.linear, 140533119438032) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[8].norm1_context.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.linear.lora_A, 140533119447872) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.linear.lora_A['default_0'], 140533119328720) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
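norm1_context is an AdaLayerNormZero; from the normalization.py:135-139 lines the guards cite, its forward is roughly the following. The chunk(6) split is an assumption inferred from the five values unpacked at transformer_flux.py:167, since only the modulation lines are quoted in the log:

    # Sketch of AdaLayerNormZero.forward around the cited lines (normalization.py:135-139).
    # self.emb is None in this trace (per the ID_MATCH on norm1_context.emb), so the
    # timestep-embedding branch at line 135 is skipped.
    def forward(self, x, emb):
        emb = self.linear(self.silu(emb))  # normalization.py:137
        shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = emb.chunk(6, dim=1)  # assumed split
        x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # normalization.py:139
        return x, gate_msa, shift_mlp, scale_mlp, gate_mlp

Because self.linear here is itself LoRA-wrapped, the adapter guard block appears again immediately below.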
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.linear.lora_A['default_0'].weight, 140537665891968) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.linear.lora_B, 140533119327136) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.linear.lora_B['default_0'], 140533119329728) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.linear.base_layer, 140581767542192) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.linear.lora_dropout, 140533119442448) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.linear.lora_dropout['default_0'], 140533119442928) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].norm1_context.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].norm1_context.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[8].norm1_context.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].norm1_context.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[8].norm1_context.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[8].norm1_context.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[8].norm1_context.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm1_context.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[8].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm2_context, 140581767543248) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[8].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[8]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
GuardManager: source=L['self'].transformer_blocks[9], accessed_by=GetItemGuardAccessor(9) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9], 140581767541520) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff, accessed_by=DictGetItemGuardAccessor(ff) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff, 140581769888144) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net, 140581769888384) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[9].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0], 140581769888336) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0]._modules, 
accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].proj, 140533119406752) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].ff.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].proj.lora_A, 140533119413136) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[9].ff.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].proj.lora_A['default_0'], 140533119218592) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].proj.lora_A['default_0'].weight, 140542619060080) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].proj.lora_B, 140533119407136) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # 
peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].proj.lora_B['default_0'], 140533119206112) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].proj.base_layer, 140581769888432) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].proj.lora_dropout, 140533119403968) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].proj.lora_dropout['default_0'], 140533119405504) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].ff.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].ff.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- 
EQUALS_MATCH: L['self'].transformer_blocks[9].ff.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].ff.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].ff.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].ff.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[9].ff.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in 
disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[1], 140581769888480) # for module in self.net: # 
diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[2], 140533119206400) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].ff.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[2].lora_A, 140533119215952) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | 
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[2].lora_A['default_0'], 140533117811200) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[2].lora_A['default_0'].weight, 140542619064880) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[2].lora_B, 140533117799776) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[2].lora_B['default_0'], 140533117798048) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[2].base_layer, 140581769888528) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[2].lora_dropout, 140533119209904) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[2].lora_dropout['default_0'], 140533119209808) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].ff.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].ff.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].scaling['default_0'], 
accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].ff.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].ff.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].ff.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].ff.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[9].ff.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].ff.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # 
peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn, 140581767544400) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards]
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_k, 140533119266544) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_k.lora_A, 140533119815344) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_k.lora_A['default_0'], 140533119821632) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_k.lora_A['default_0'].weight, 140542608500000) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_k.lora_B, 140533119812896) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_k.lora_B['default_0'], 140533119819088) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_k.base_layer, 140581767544544) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_k.lora_dropout, 140533119812608) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_k.lora_dropout['default_0'], 140533119821248) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[9].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_q, 140533119261840) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_q.lora_A, 140533119263952) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_q.lora_A['default_0'], 140533119262992) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_q.lora_A['default_0'].weight, 140542618890176) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_q.lora_B, 140533119270384) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_q.lora_B['default_0'], 140533119266400) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_q.base_layer, 140581767544640) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_q.lora_dropout, 140533119263328) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_q.lora_dropout['default_0'], 140533119266064) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[9].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_v, 140533119822304) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_v.lora_A, 140533119816880) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_v.lora_A['default_0'], 140533119817360) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_v.lora_A['default_0'].weight, 140542608503600) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_v.lora_B, 140533119827056) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_v.lora_B['default_0'], 140533119820336) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_v.base_layer, 140581767544736) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_v.lora_dropout, 140533119825856) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_v.lora_dropout['default_0'], 140533119818080) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[9].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.norm_k, 140581767544592) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.norm_k.weight, 140581765826736) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.norm_q, 140581767544496) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.norm_q.weight, 140581772717536) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out, 140581769887904) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[0], 140533119175744) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].attn.to_out[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[0].training, 140591004393408) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[0].lora_A, 140533119185056) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[0].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[0].lora_A['default_0'], 140533119175936) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[0].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[0].lora_A['default_0'].weight, 140542619064480) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | |
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[0].lora_B, 140533119186736) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[0].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[0].lora_B['default_0'], 140533119187696) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[0].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[0].base_layer, 140581769887952) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[0].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[0].lora_dropout, 140533119183184) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[0].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[0].lora_dropout['default_0'], 140533119173056) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[0].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: 
___check_type_id(L['self'].transformer_blocks[9].attn.to_out[0].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].attn.to_out[0].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].attn.to_out[0].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.to_out[0].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].attn.to_out[0].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[0].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.to_out[0].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[9].attn.to_out[0].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[0]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[0]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[1], accessed_by=GetItemGuardAccessor(1) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[1], 140581769888000) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # 
diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_k_proj, 140533119827728) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].attn.add_k_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_k_proj.training, 140591004393408) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_k_proj.lora_A, 140533119001472) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_k_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | 
+- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_k_proj.lora_A['default_0'], 140533119007184) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_k_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_k_proj.lora_A['default_0'].weight, 140542608497040) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_k_proj.lora_B, 140533119004496) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_k_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_k_proj.lora_B['default_0'], 140533119001616) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_k_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_k_proj.base_layer, 140581767544784) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_k_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_k_proj.lora_dropout, 140533119818272) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.lora_dropout.__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_k_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_k_proj.lora_dropout['default_0'], 140533119812752) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_k_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.add_k_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].attn.add_k_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].attn.add_k_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | 
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.add_k_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].attn.add_k_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_k_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.add_k_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[9].attn.add_k_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_k_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] 
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_k_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_q_proj, 140533119008576) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].attn.add_q_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_q_proj.training, 140591004393408) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[9].attn.add_q_proj.lora_A, 140533119002432) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_q_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_q_proj.lora_A['default_0'], 140533119177952) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_q_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_q_proj.lora_A['default_0'].weight, 140542619063440) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_q_proj.lora_B, 140533119006944) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_q_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_q_proj.lora_B['default_0'], 140533119178864) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_q_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_q_proj.base_layer, 140581769887856) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) 
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_q_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_q_proj.lora_dropout, 140533119008672) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_q_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_q_proj.lora_dropout['default_0'], 140533119007136) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_q_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.add_q_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].attn.add_q_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].attn.add_q_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.add_q_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].attn.add_q_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_q_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.add_q_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[9].attn.add_q_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_q_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_q_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_v_proj, 140533119001376) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].attn.add_v_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_v_proj.training, 140591004393408) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_v_proj.lora_A, 140533118994608) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_v_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_v_proj.lora_A['default_0'], 140533119004112) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_v_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_v_proj.lora_A['default_0'].weight, 140542608504880) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_v_proj.lora_B, 140533119002048) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_v_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_v_proj.lora_B['default_0'], 140533118999216) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_v_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_v_proj.base_layer, 140581769887808) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_v_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_v_proj.lora_dropout, 140533119001520) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_v_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_v_proj.lora_dropout['default_0'], 140533118994800) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager:
source=L['self'].transformer_blocks[9].attn.add_v_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_v_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.add_v_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].attn.add_v_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].attn.add_v_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.add_v_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].attn.add_v_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_v_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.add_v_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[9].attn.add_v_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.add_v_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.add_v_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out, 
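
The block of guards above is the compile-time shadow of PEFT's LoRA Linear.forward: the source comments quote peft/tuners/lora/layer.py:557-568 line by line. Stitched together from those quoted lines, the hot path being protected looks roughly like the sketch below (a paraphrase, not the verbatim PEFT source; the combined disable/merged branch, the residual add, and the dtype restore are filled in from how PEFT's Linear layer combines adapter output and are not quoted by any guard here):

    # Sketch of peft/tuners/lora/layer.py, Linear.forward (paraphrased).
    # Only lines tagged with a layer.py line number are quoted in the guards above.
    def forward(self, x, *args, **kwargs):
        if self.disable_adapters or self.merged:              # guards: _disable_adapters, merged_adapters
            return self.base_layer(x, *args, **kwargs)
        result = self.base_layer(x, *args, **kwargs)          # layer.py:557
        torch_result_dtype = result.dtype                     # filled in, not quoted
        for active_adapter in self.active_adapters:           # guard: _active_adapter (one shared object)
            if active_adapter not in self.lora_A.keys():      # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]              # layer.py:562
            lora_B = self.lora_B[active_adapter]              # layer.py:563
            dropout = self.lora_dropout[active_adapter]       # layer.py:564
            scaling = self.scaling[active_adapter]            # layer.py:565
            x = x.to(lora_A.weight.dtype)                     # layer.py:566
            if not self.use_dora[active_adapter]:             # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling  # filled in, not quoted
        return result.to(torch_result_dtype)                  # filled in, not quoted

Every attribute and dict lookup in that loop becomes its own guard, which is why each LoRA-wrapped projection contributes an identical block: ID_MATCHes for lora_A/lora_B/base_layer, TYPE_MATCH plus DICT_LENGTH for the scaling and use_dora dicts, and an EQUALS_MATCH pinning scaling['default_0'] == 1.0 (presumably lora_alpha == r for this adapter).
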
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_add_out, 140533119416544) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].attn.to_add_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_add_out.training, 140591004393408) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_add_out.lora_A, 140533119415536) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_add_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_add_out.lora_A['default_0'], 140533119408816) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_add_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_add_out.lora_A['default_0'].weight, 140542619066480) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_add_out.lora_B, 140533119412176) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_add_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_add_out.lora_B['default_0'], 140533119414048) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_add_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_add_out.base_layer, 140581769888048) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_add_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_add_out.lora_dropout, 140533119406944) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_add_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_add_out.lora_dropout['default_0'], 140533119416640) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_add_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.to_add_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].attn.to_add_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].attn.to_add_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.to_add_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].attn.to_add_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_add_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.to_add_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[9].attn.to_add_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.to_add_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.to_add_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.norm_added_k, 140581769888192) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.norm_added_k.weight, 140581765826976) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.norm_added_q, 140581769888096) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.norm_added_q.weight, 140581772714736) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].attn.processor, 140581767544352) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
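
norm_added_k and norm_added_q are the RMSNorms applied to the added key/query projections; their guards quote diffusers/src/diffusers/models/normalization.py:428 and 430 and pin eps == 1e-06 by value. A minimal reconstruction of that RMSNorm forward around the two quoted lines (the variance computation and the final weight multiply follow the standard RMSNorm definition and are filled in here as assumptions):

    # diffusers RMSNorm forward, reconstructed; only lines 428 and 430 are quoted above.
    variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)  # assumed, standard RMSNorm
    hidden_states = hidden_states * torch.rsqrt(variance + self.eps)          # normalization.py:428
    if self.weight is not None:                                               # normalization.py:430
        hidden_states = hidden_states.to(self.weight.dtype) * self.weight     # assumed

The EQUALS_MATCH on attn.heads == 24 just above the processor guards bakes the head count into this compile; with Flux's inner_dim of 3072 (an assumption, not shown in this log), the quoted head_dim = inner_dim // attn.heads works out to 3072 // 24 = 128.
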
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1, accessed_by=DictGetItemGuardAccessor(norm1)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1, 140581767543920) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.norm, 140581767544064) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.silu, 140581767543968) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.linear, 140533119265248) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[9].norm1.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
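
norm1's guards quote diffusers/src/diffusers/models/normalization.py:135-139, i.e. AdaLayerNormZero: SiLU into a (LoRA-wrapped) Linear that produces six modulation tensors, then a modulated LayerNorm. Reassembled from the quoted lines, with the chunk step filled in from the standard AdaLayerNormZero implementation (it is not quoted by any guard here):

    # AdaLayerNormZero forward, reconstructed around the quoted lines.
    if self.emb is not None:                   # normalization.py:135; the ID_MATCH on norm1.emb pins this branch
        emb = self.emb(timestep, class_labels, hidden_dtype=hidden_dtype)  # skipped when emb is None (assumed for Flux, where temb is passed in directly)
    emb = self.linear(self.silu(emb))          # normalization.py:137; linear is the LoRA-wrapped layer guarded below
    shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = emb.chunk(6, dim=1)  # assumed, standard AdaLayerNormZero
    x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]       # normalization.py:139
    return x, gate_msa, shift_mlp, scale_mlp, gate_mlp
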
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.linear.lora_A, 140533119269040) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.linear.lora_A['default_0'], 140533119263184) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.linear.lora_A['default_0'].weight, 140542618881936) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.linear.lora_B, 140533119269232) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.linear.lora_B['default_0'], 140533119262944) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.linear.base_layer, 140581767544016) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.linear.lora_dropout, 140533119269376) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.linear.lora_dropout['default_0'], 140533119265824) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].norm1.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].norm1.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].norm1.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].norm1.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].norm1.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
source=L['self'].transformer_blocks[9].norm1.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].norm1.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[9].norm1.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | 
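For orientation, every attribute pinned above (base_layer, lora_A/lora_B, lora_dropout, scaling, use_dora, merged_adapters, _active_adapter) is read on the hot path of PEFT's LoRA Linear forward, which is why every LoRA-wrapped Linear in the model contributes the same block of guards. Below is a minimal sketch of that path, paraphrased from the source lines the guard comments cite (peft/tuners/lora/layer.py:557-568); the class name and constructor are illustrative, and the real implementation has more branches (merged weights, DoRA, multiple adapters).

    # Sketch of the guarded LoRA forward path; attribute names match the guard
    # sources above, everything else is an assumption for illustration.
    import torch
    import torch.nn as nn

    class LoraLinearSketch(nn.Module):
        def __init__(self, base_layer: nn.Linear, r: int = 16, scaling: float = 1.0):
            super().__init__()
            self.base_layer = base_layer                      # guarded via ID_MATCH
            self.lora_A = nn.ModuleDict({"default_0": nn.Linear(base_layer.in_features, r, bias=False)})
            self.lora_B = nn.ModuleDict({"default_0": nn.Linear(r, base_layer.out_features, bias=False)})
            self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})
            self.scaling = {"default_0": scaling}             # guarded via EQUALS_MATCH == 1.0
            self.use_dora = {"default_0": False}              # guarded via ID_MATCH on the bool
            self.active_adapters = ["default_0"]

        def forward(self, x, *args, **kwargs):
            result = self.base_layer(x, *args, **kwargs)      # layer.py:557
            for active_adapter in self.active_adapters:
                if active_adapter not in self.lora_A.keys():  # layer.py:560
                    continue
                lora_A = self.lora_A[active_adapter]          # layer.py:562
                lora_B = self.lora_B[active_adapter]          # layer.py:563
                dropout = self.lora_dropout[active_adapter]   # layer.py:564
                scaling = self.scaling[active_adapter]        # layer.py:565
                x = x.to(lora_A.weight.dtype)                 # layer.py:566
                if not self.use_dora[active_adapter]:         # layer.py:568
                    result = result + lora_B(lora_A(dropout(x))) * scaling
            return result

Because the modules are guarded by ID_MATCH (object identity, not value equality), re-assigning any of these attributes, even to an equivalent module, invalidates this cache entry on the next call.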
+- norm2: ID_MATCH on module and .training  # norm_hidden_states = self.norm2(hidden_states)  # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
+- ff_context: ID_MATCH on module and .training, DICT_CONTAINS: not ___dict_contains('forward', __dict__)  # context_ff_output = self.ff_context(norm_encoder_hidden_states)  # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
|  +- net: ID_MATCH, TYPE_MATCH, LENGTH_CHECK len(net) == 3, ID_MATCH on .training  # for module in self.net:  # diffusers/src/diffusers/models/attention.py:1200 in forward
|  +- net[0]: ID_MATCH on module and .training, DICT_CONTAINS: no 'forward' override
|  |  +- net[0].proj (PEFT LoRA Linear): ID_MATCH on module and .training, DICT_CONTAINS: no 'forward' override  # hidden_states = self.proj(hidden_states)  # diffusers/src/diffusers/models/activations.py:88 in forward
|  |  |  +- per-adapter guard set for 'default_0', identical in shape to norm1.linear above: ID_MATCH on lora_A / lora_A['default_0'] / lora_A['default_0'].weight (peft/tuners/lora/layer.py:560/562/566), lora_B / lora_B['default_0'] (layer.py:563), base_layer (layer.py:557), lora_dropout / lora_dropout['default_0'] (layer.py:564); TYPE_MATCH + DICT_LENGTH == 1 + EQUALS_MATCH scaling['default_0'] == 1.0 (layer.py:565); TYPE_MATCH + DICT_LENGTH == 1 + ID_MATCH on use_dora['default_0'] (layer.py:568); LENGTH_CHECK not merged_adapters (tuners_utils.py:506); ID_MATCH on _disable_adapters (tuners_utils.py:511); hook-dict GuardManagers; TENSOR_ALIASING of _active_adapter with L['self'].transformer_blocks[0].norm1.linear._active_adapter (tuners_utils.py:516)
|  |  +- net[0].approximate: EQUALS_MATCH == 'tanh'  # return F.gelu(gate, approximate=self.approximate)  # diffusers/src/diffusers/models/activations.py:83 in gelu
|  |  +- net[0]: hook-dict GuardManagers
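The net[0..2] guards above walk diffusers' feed-forward container for the context stream. The sketch below assembles the structure they imply from the cited lines (the module loop at attention.py:1200 and the GELU-with-projection at activations.py:83/88); net[1] receives only module/training guards because that slot is a Dropout with no extra guarded state. Class names here are illustrative, not the exact diffusers definitions.

    # Sketch of the guarded feed-forward: GELU(tanh) projection, dropout, output linear.
    import torch
    import torch.nn as nn
    import torch.nn.functional as F

    class GELUProj(nn.Module):
        """GELU with input projection; attributes proj/approximate match the guards."""
        def __init__(self, dim_in: int, dim_out: int, approximate: str = "tanh"):
            super().__init__()
            self.proj = nn.Linear(dim_in, dim_out)  # LoRA-wrapped in the guarded model
            self.approximate = approximate          # guarded via EQUALS_MATCH == 'tanh'

        def gelu(self, gate: torch.Tensor) -> torch.Tensor:
            return F.gelu(gate, approximate=self.approximate)   # activations.py:83

        def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
            hidden_states = self.proj(hidden_states)            # activations.py:88
            return self.gelu(hidden_states)

    class FeedForwardSketch(nn.Module):
        def __init__(self, dim: int, mult: int = 4, dropout: float = 0.0):
            super().__init__()
            inner = dim * mult
            # len(self.net) == 3, as the LENGTH_CHECK guard asserts
            self.net = nn.ModuleList([GELUProj(dim, inner), nn.Dropout(dropout), nn.Linear(inner, dim)])

        def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
            for module in self.net:                             # attention.py:1200
                hidden_states = module(hidden_states)
            return hidden_states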
|  +- net[1]: ID_MATCH on module and .training  # for module in self.net:  # diffusers/src/diffusers/models/attention.py:1200 in forward
|  +- net[2] (PEFT LoRA Linear): ID_MATCH on module and .training, DICT_CONTAINS: no 'forward' override; per-adapter guard set for 'default_0' identical in shape to norm1.linear above (lora_A / lora_A['default_0'] / lora_A['default_0'].weight, lora_B / lora_B['default_0'], base_layer and lora_dropout ID_MATCHes, EQUALS_MATCH scaling['default_0'] == 1.0, use_dora checks, LENGTH_CHECK not merged_adapters, ID_MATCH on _disable_adapters, hook-dict GuardManagers, TENSOR_ALIASING of _active_adapter with L['self'].transformer_blocks[0].norm1.linear._active_adapter)
+- ff_context: _forward_hooks / _backward_hooks / _forward_pre_hooks / _backward_pre_hooks hook-dict GuardManagers
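A note on the guard kinds recurring throughout this dump. The real checks are compiled accessors in the C++ GuardManager tree, but their semantics are roughly this plain Python:

    # Approximate semantics of the guard primitives; illustrative only.
    def check_obj_id(obj, expected_id: int) -> bool:       # ID_MATCH
        return id(obj) == expected_id                      # same object, not merely equal

    def check_type_id(obj, expected_type_id: int) -> bool: # TYPE_MATCH
        return id(type(obj)) == expected_type_id

    # EQUALS_MATCH: value equality, e.g. scaling['default_0'] == 1.0
    # DICT_LENGTH / LENGTH_CHECK: len(d) == n, or truthiness for "not merged_adapters"
    # DICT_CONTAINS: (non-)membership of a key, e.g. no 'forward' override in __dict__
    # TENSOR_ALIASING: identity between two sources, e.g.
    #   blocks[0].norm1.linear._active_adapter is blocks[9].ff_context.net[2]._active_adapter

The practical consequence: mutating any guarded value, for example loading a second adapter (DICT_LENGTH becomes 2), changing the LoRA scale away from 1.0 (EQUALS_MATCH fails), or toggling a .training flag, fails the corresponding check on the next call and forces a recompile instead of silently reusing a stale graph.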
+- norm1_context (AdaLayerNormZero): ID_MATCH on module and .training, DICT_CONTAINS: no 'forward' override  # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context(  # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
|  +- emb: ID_MATCH  # if self.emb is not None:  # diffusers/src/diffusers/models/normalization.py:135 in forward
|  +- norm: ID_MATCH on module and .training  # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # diffusers/src/diffusers/models/normalization.py:139 in forward
|  +- silu: ID_MATCH on module and .training  # emb = self.linear(self.silu(emb))  # diffusers/src/diffusers/models/normalization.py:137 in forward
|  +- linear (PEFT LoRA Linear): ID_MATCH on module and .training, DICT_CONTAINS: no 'forward' override  # emb = self.linear(self.silu(emb))  # diffusers/src/diffusers/models/normalization.py:137 in forward
|  |  +- lora_A and lora_A['default_0']: ID_MATCH on module and .training  # if active_adapter not in self.lora_A.keys():  # peft/tuners/lora/layer.py:560 in forward; # lora_A = self.lora_A[active_adapter]  # peft/tuners/lora/layer.py:562 in forward
self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.linear.lora_A['default_0'].weight, 140542618881616) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.linear.lora_B, 140533119262464) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.linear.lora_B['default_0'], 140533119262368) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.linear.base_layer, 140581767544256) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.linear.lora_dropout, 140533119262224) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.linear.lora_dropout['default_0'], 140533119265008) # 
dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].norm1_context.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].norm1_context.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[9].norm1_context.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].norm1_context.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[9].norm1_context.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[9].norm1_context.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[9].norm1_context.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm1_context.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[9].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].transformer_blocks[9].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm2_context, 140581769888288) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[9].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[9]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- 
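The ID_MATCH / TYPE_MATCH / EQUALS_MATCH / LENGTH_CHECK entries in this subtree all specialize the same handful of source lines, quoted verbatim in the guard comments: peft/tuners/lora/layer.py:557-568, the LoRA Linear forward. A minimal sketch of that code path, reconstructed only from the lines the guard comments quote; the final accumulation statement is not quoted anywhere in this log and is written here as an assumption about the standard peft LoRA update, not as verbatim library code:

    # Sketch of the peft LoRA forward path these guards specialize.
    # Each commented line number is quoted in the guard comments above;
    # the `result = result + ...` line is an assumption (standard LoRA update).
    def forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)      # layer.py:557
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():  # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]          # layer.py:562
            lora_B = self.lora_B[active_adapter]          # layer.py:563
            dropout = self.lora_dropout[active_adapter]   # layer.py:564
            scaling = self.scaling[active_adapter]        # layer.py:565 (EQUALS_MATCH pins 1.0)
            x = x.to(lora_A.weight.dtype)                 # layer.py:566
            if not self.use_dora[active_adapter]:         # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling  # assumed update
        return result

This is why every LoRA submodule (lora_A, lora_B, lora_dropout, scaling, use_dora, merged_adapters, _active_adapter) carries its own guard: Dynamo burns the adapter-dict lookups and the scaling value into the compiled graph, so changing the adapter set, the LoRA scale, or the merge state invalidates these guards and triggers recompilation. The same pattern repeats below for transformer_blocks[10].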
| | | | | +- GuardManager: source=L['self'].transformer_blocks[10], accessed_by=GetItemGuardAccessor(10)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10], 140581767543584) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff, accessed_by=DictGetItemGuardAccessor(ff)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff, 140581769890208) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net, 140581769890448) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[10].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0], 140581769890400) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].proj, 140533118282736) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].ff.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].proj.lora_A, 140533118285856) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].proj.lora_A['default_0'], 140533118237856) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].proj.lora_A['default_0'].weight, 140537664996112) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].proj.lora_B, 140533118276208) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].proj.lora_B['default_0'], 140533118230704) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].proj.base_layer, 140581769890496) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].proj.lora_dropout, 140533118273760) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].proj.lora_dropout['default_0'], 140533118272512) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].ff.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].ff.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].ff.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].ff.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].ff.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].ff.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[10].ff.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[1], 140581769890544) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[2], 140533118235360) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].ff.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[2].lora_A, 140533118234688) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[2].lora_A['default_0'], 140533118233920) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[2].lora_A['default_0'].weight, 140537665002832) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[2].lora_B, 140533118228064) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[2].lora_B['default_0'], 140533118238288) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[2].base_layer, 140581769890592) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[2].lora_dropout, 140533118237664) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[2].lora_dropout['default_0'], 140533118235648) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].ff.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].ff.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] #
peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].ff.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].ff.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].ff.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].ff.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[10].ff.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn, 140581769889440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- 
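
Every check in the ff.net[2] subtree above maps onto an attribute read in peft's LoRA Linear.forward; the trailing # comments on each guard name the exact source line. A minimal sketch of that code path, paraphrased from those quoted comments rather than copied verbatim from peft, shows why a single LoRA-wrapped Linear contributes this many guards:

    # Paraphrase of peft/tuners/lora/layer.py:557-568 as quoted by the guards above.
    # Each attribute access on the left is what the matching guard re-checks before
    # a cached compiled graph may be reused.
    def forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)      # :557  ID_MATCH on base_layer
        for active_adapter in self.active_adapters:       #       TENSOR_ALIASING on _active_adapter
            if active_adapter not in self.lora_A.keys():  # :560  ID_MATCH on lora_A dict
                continue
            lora_A = self.lora_A[active_adapter]          # :562  ID_MATCH on lora_A['default_0']
            lora_B = self.lora_B[active_adapter]          # :563  ID_MATCH on lora_B['default_0']
            dropout = self.lora_dropout[active_adapter]   # :564  ID_MATCH on lora_dropout['default_0']
            scaling = self.scaling[active_adapter]        # :565  TYPE_MATCH/DICT_LENGTH/EQUALS_MATCH == 1.0
            x = x.to(lora_A.weight.dtype)                 # :566  ID_MATCH on lora_A weight
            if not self.use_dora[active_adapter]:         # :568  ID_MATCH on use_dora['default_0']
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result
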
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn, 140581769889440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_k, 140533119539360) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_k.lora_A, 140533119535040) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_k.lora_A['default_0'], 140533121925280) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_k.lora_A['default_0'].weight, 140542617465888) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_k.lora_B, 140533119549344) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_k.lora_B['default_0'], 140533121934208) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_k.base_layer, 140581769889584) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_k.lora_dropout, 140533119541664) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_k.lora_dropout['default_0'], 140533119542048) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[10].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
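
The same roughly forty-guard pattern has now repeated for attn.to_k, and it repeats again below for to_q and to_v; every LoRA-wrapped Linear in every transformer block contributes an identical subtree, which is why this dump runs to thousands of lines. The [0/3] tag on the original records marks frame 0 on compile attempt 3, i.e. this tree belongs to what appears to be the fourth compilation of the same forward. A dump like this can be re-emitted with PyTorch's standard artifact-logging switches; a minimal sketch, where flux_transformer is a stand-in name for the FluxTransformer2DModel being traced here (it does not appear by that name in the log):

    import torch

    # Enable guard and recompile logging (PyTorch 2.x; the exact set of
    # keyword switches accepted by set_logs varies slightly by release).
    torch._logging.set_logs(guards=True, recompiles=True)
    # Equivalent from the shell before launching the script:
    #   TORCH_LOGS="guards,recompiles" python your_script.py

    compiled = torch.compile(flux_transformer)

The recompiles artifact is usually the more useful one in practice: it prints only the specific guard that failed, rather than the full tree that gets rebuilt afterwards.
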
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_q, 140533119536816) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_q.lora_A, 140533119536144) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_q.lora_A['default_0'], 140533119536528) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_q.lora_A['default_0'].weight, 140542617459888) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_q.lora_B, 140533119534032) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_q.lora_B['default_0'], 140533119541760) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_q.base_layer, 140581769889680) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_q.lora_dropout, 140533119536000) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_q.lora_dropout['default_0'], 140533119535712) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[10].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
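
Nothing in the to_q subtree above is shape-dependent: nearly every check is an ID_MATCH or TYPE_MATCH over the peft bookkeeping dicts (lora_A, lora_B, scaling, use_dora, merged_adapters), all of which are invariant during inference. When this guard surface becomes a problem, one common mitigation is to fold the adapter into the base weights before compiling, so Dynamo only sees plain Linear modules. A sketch of the usual diffusers-level recipe follows; pipe is assumed to be the Flux pipeline this transformer belongs to and does not appear anywhere in the log:

    import torch

    # Fold the LoRA deltas (lora_B @ lora_A * scaling) into the base weights and
    # then drop the adapter modules before compiling. fuse_lora() and
    # unload_lora_weights() are the diffusers entry points; on a bare peft model
    # the rough equivalents are merge_adapter() / merge_and_unload().
    pipe.fuse_lora()
    pipe.unload_lora_weights()
    pipe.transformer = torch.compile(pipe.transformer)

The trade-off is that the adapter can no longer be toggled or re-scaled at runtime without unfusing and recompiling, which is exactly the flexibility the per-layer guards above are paying for.
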
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_v, 140533121933584) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_v.lora_A, 140533121938000) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_v.lora_A['default_0'], 140533121932768) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_v.lora_A['default_0'].weight, 140542617458528) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_v.lora_B, 140533121925184) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_v.lora_B['default_0'], 140533121933632) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_v.base_layer, 140581769889776) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_v.lora_dropout, 140533121937712) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_v.lora_dropout['default_0'], 140533121933488) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | 
| +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | 
+- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[10].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.norm_k, 140581769889632) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] 
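
Every guard in the to_v subtree above corresponds to one attribute read in peft's LoRA forward. Below is a condensed sketch of that code path, assembled only from the source lines the guards cite (peft/tuners/lora/layer.py:557-568); the class scaffolding and the active-adapter loop are assumptions for illustration, not the real peft implementation:

    import torch
    import torch.nn as nn

    # Sketch of the guarded path; line references point at the citations in the
    # guard tree above. Not the verbatim peft Linear class.
    class LoraLinearSketch(nn.Module):
        def __init__(self, base_layer: nn.Linear, r: int = 4):
            super().__init__()
            self.base_layer = base_layer                      # -> base_layer ID_MATCH
            self.lora_A = nn.ModuleDict({"default_0": nn.Linear(base_layer.in_features, r, bias=False)})
            self.lora_B = nn.ModuleDict({"default_0": nn.Linear(r, base_layer.out_features, bias=False)})
            self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})
            self.scaling = {"default_0": 1.0}                 # plain float -> EQUALS_MATCH == 1.0
            self.use_dora = {"default_0": False}              # plain dict -> TYPE_MATCH / DICT_LENGTH
            self._active_adapter = ["default_0"]              # shared object -> TENSOR_ALIASING

        def forward(self, x):
            result = self.base_layer(x)                       # layer.py:557
            for active_adapter in self._active_adapter:
                if active_adapter not in self.lora_A.keys():  # layer.py:560
                    continue
                lora_A = self.lora_A[active_adapter]          # layer.py:562
                lora_B = self.lora_B[active_adapter]          # layer.py:563
                dropout = self.lora_dropout[active_adapter]   # layer.py:564
                scaling = self.scaling[active_adapter]        # layer.py:565
                x = x.to(lora_A.weight.dtype)                 # layer.py:566 -> weight ID_MATCH
                if not self.use_dora[active_adapter]:         # layer.py:568
                    result = result + lora_B(lora_A(dropout(x))) * scaling
            return result

Each dict lookup in this path is what produces the TYPE_MATCH, DICT_LENGTH, ID_MATCH, and EQUALS_MATCH entries recorded per adapter key.
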
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.norm_k, 140581769889632) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.norm_k.weight, 140581765992896) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
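
The eps EQUALS_MATCH and weight ID_MATCH on norm_k come from the RMSNorm forward in diffusers/src/diffusers/models/normalization.py:428-430. A minimal sketch of that computation, reconstructed from the two cited lines (the float32 upcast for the variance is an assumption, not quoted by the guards):

    import torch

    def rms_norm_sketch(hidden_states: torch.Tensor, weight, eps: float = 1e-6):
        # line 428: hidden_states = hidden_states * torch.rsqrt(variance + self.eps)
        variance = hidden_states.float().pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + eps)
        if weight is not None:  # line 430: if self.weight is not None:
            hidden_states = hidden_states.to(weight.dtype) * weight
        return hidden_states

Because eps is a plain Python float read inside forward, Dynamo specializes on its value (== 1e-06) instead of treating it symbolically, hence the EQUALS_MATCH.
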
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.norm_q, 140581769889536) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.norm_q.weight, 140581765992976) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out, 140581769889968) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[0], 140533118285712) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].attn.to_out[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[0].training, 140591004393408) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[0].lora_A, 140533118280384) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[0].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[0].lora_A['default_0'], 140533118278944) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[0].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[0].lora_A['default_0'].weight, 140537669850576) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[0].lora_B, 140533118273616) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[0].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[0].lora_B['default_0'], 140533118276928) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[0].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[0].base_layer, 140581769890016) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[0].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[0].lora_dropout, 140533118277168) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[0].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[0].lora_dropout['default_0'], 140533118285616) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[0].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.to_out[0].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].attn.to_out[0].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].attn.to_out[0].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.to_out[0].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].attn.to_out[0].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[0].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.to_out[0].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[10].attn.to_out[0].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[0]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[0]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[1], 140581769890064) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
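
The EQUALS_MATCH guards on scaling['default_0'] == 1.0 mean the LoRA scale is baked into this compiled graph as a constant: it is a plain Python float read inside forward (layer.py:565), so a run with any other scale fails the guard and forces a recompile. A toy reproduction of that mechanism (the module and names here are hypothetical, not Flux- or peft-specific):

    import torch

    class Scaled(torch.nn.Module):
        # Toy stand-in for a LoRA layer: the scale lives in a plain dict of
        # floats, just like the guarded `scaling` dicts above.
        def __init__(self):
            super().__init__()
            self.lin = torch.nn.Linear(8, 8)
            self.scaling = {"default_0": 1.0}

        def forward(self, x):
            return self.lin(x) * self.scaling["default_0"]

    m = Scaled()
    cm = torch.compile(m)
    x = torch.randn(2, 8)
    cm(x)                         # first compile; installs EQUALS_MATCH: scaling['default_0'] == 1.0
    m.scaling["default_0"] = 0.5  # e.g. a different lora_scale
    cm(x)                         # guard fails -> silent recompile specialized on 0.5
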
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_k_proj, 140533121932336) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].attn.add_k_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_k_proj.training, 140591004393408) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_k_proj.lora_A, 140533119492704) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_k_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_k_proj.lora_A['default_0'], 140533119490688) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_k_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_k_proj.lora_A['default_0'].weight, 140542608893296) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_k_proj.lora_B, 140533119488528) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_k_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_k_proj.lora_B['default_0'], 140533119486272) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_k_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_k_proj.base_layer, 140581769889824) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_k_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_k_proj.lora_dropout, 140533121930704) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_k_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_k_proj.lora_dropout['default_0'], 140533121930080) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_k_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.add_k_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].attn.add_k_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].attn.add_k_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.add_k_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].attn.add_k_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_k_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.add_k_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[10].attn.add_k_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_k_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
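
The TENSOR_ALIASING entries check object identity rather than value: every LoRA layer's _active_adapter in this model is the same Python object, so Dynamo anchors one representative (transformer_blocks[0].norm1.linear._active_adapter) and verifies the others with `is` instead of installing an independent guard per copy. A hypothetical identity check mirroring those guards, assuming `transformer` is the module being compiled here:

    # Hypothetical; `transformer` stands in for the model instance in this log.
    anchor = transformer.transformer_blocks[0].norm1.linear._active_adapter
    other = transformer.transformer_blocks[10].attn.add_k_proj._active_adapter
    assert anchor is other  # one shared object; Dynamo guards the alias, not N copies
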
| | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_k_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_k_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_q_proj, 140533118287728) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].attn.add_q_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_q_proj.training, 
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_q_proj, 140533118287728) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].attn.add_q_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_q_proj.training, 140591004393408) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_q_proj.lora_A, 140533118287632) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_q_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_q_proj.lora_A['default_0'], 140533118271552) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_q_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_q_proj.lora_A['default_0'].weight, 140537669857696) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_q_proj.lora_B, 140533118286720) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_q_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_q_proj.lora_B['default_0'], 140533118287824) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_q_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_q_proj.base_layer, 140581769889920) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_q_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_q_proj.lora_dropout, 140533118279472) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_q_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_q_proj.lora_dropout['default_0'], 140533118286336) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_q_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.add_q_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].attn.add_q_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].attn.add_q_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.add_q_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].attn.add_q_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_q_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.add_q_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[10].attn.add_q_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_q_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_q_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
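The guard kinds in this tree are all cheap Python-level predicates (this dump comes from running with TORCH_LOGS="guards"). As a rough mental model, hand-written equivalents look like the sketch below; the real checks live in torch/_dynamo/guards.py and its C++ guard manager, so treat these as illustrations only:

    # Illustrative equivalents of the guard primitives seen above (assumed
    # semantics; not Dynamo's actual implementation).
    def id_match(obj, expected_id):          # ID_MATCH / ___check_obj_id
        return id(obj) == expected_id        # object identity, e.g. a bool singleton or a module

    def type_match(obj, expected_type_id):   # TYPE_MATCH / ___check_type_id
        return id(type(obj)) == expected_type_id

    def dict_length(d, n):                   # DICT_LENGTH
        return len(d) == n                   # e.g. len(scaling) == 1

    def equals_match(value, expected):       # EQUALS_MATCH
        return value == expected             # e.g. scaling['default_0'] == 1.0

    def length_check_empty(seq):             # LENGTH_CHECK in its "not x" form
        return not seq                       # e.g. merged_adapters is empty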
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_v_proj, 140533119494192) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].attn.add_v_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_v_proj.training, 140591004393408) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_v_proj.lora_A, 140533119499760) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_v_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_v_proj.lora_A['default_0'], 140533118286144) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_v_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_v_proj.lora_A['default_0'].weight, 140542608889856) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_v_proj.lora_B, 140533119490208) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_v_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_v_proj.lora_B['default_0'], 140533118287488) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_v_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_v_proj.base_layer, 140581769889872) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_v_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_v_proj.lora_dropout, 140533119495776) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_v_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_v_proj.lora_dropout['default_0'], 140533119496784) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_v_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.add_v_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].attn.add_v_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].attn.add_v_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.add_v_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].attn.add_v_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_v_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.add_v_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[10].attn.add_v_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.add_v_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.add_v_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
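Note the paired TENSOR_ALIASING entries that close each LoRA subtree: despite the tensor-flavored name, here they assert object identity between each module's _active_adapter and the one on transformer_blocks[0].norm1.linear, i.e. that every wrapped layer shares a single adapter-selection object. Roughly (module paths taken from the guards; `model` is a stand-in for L['self']):

    # What the TENSOR_ALIASING guards amount to: an `is` check on a shared object.
    ref = model.transformer_blocks[0].norm1.linear._active_adapter
    assert model.transformer_blocks[10].attn.add_v_proj._active_adapter is ref
    # Rebinding _active_adapter on any one module (instead of mutating the
    # shared object in place) breaks the aliasing and invalidates this cache entry.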
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_add_out, 140533118279328) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].attn.to_add_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_add_out.training, 140591004393408) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_add_out.lora_A, 140533118278896) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_add_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_add_out.lora_A['default_0'], 140533118271744) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_add_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_add_out.lora_A['default_0'].weight, 140537665009152) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_add_out.lora_B, 140533118278368) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_add_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_add_out.lora_B['default_0'], 140533118280288) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_add_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_add_out.base_layer, 140581769890112) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_add_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_add_out.lora_dropout, 140533118280240) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_add_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_add_out.lora_dropout['default_0'], 140533118281200) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_add_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.to_add_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].attn.to_add_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].attn.to_add_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.to_add_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].attn.to_add_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_add_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.to_add_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[10].attn.to_add_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.to_add_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.to_add_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
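EQUALS_MATCH pins every scaling['default_0'] above to the literal float 1.0, so the compiled graph is specialized to this exact LoRA scale (in PEFT the stored scale is typically lora_alpha / r, times any runtime multiplier). Changing the scale at inference time mutates these floats and fails the guard, which costs a recompile rather than a wrong result. A toy illustration with made-up values:

    # Toy illustration: the cache entry is valid only while the guarded float holds.
    scaling = {"default_0": 1.0}             # value baked into the guards above

    def guard_holds():
        return scaling["default_0"] == 1.0   # EQUALS_MATCH

    assert guard_holds()
    scaling["default_0"] = 0.8               # e.g. applying a runtime LoRA scale
    assert not guard_holds()                 # guard fails -> torch.compile recompiles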
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.norm_added_k, 140581769890256) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.norm_added_k.weight, 140581765992736) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.norm_added_q, 140581769890160) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.norm_added_q.weight, 140581765992816) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].attn.processor, 140581769889392) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1, accessed_by=DictGetItemGuardAccessor(norm1) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1, 140581769888960) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = 
self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.norm, 140581769889104) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.silu, 140581769889008) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.linear, 140533118744848) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].norm1.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.linear.lora_A, 140533118730640) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | 
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.linear.lora_A['default_0'], 140533119542240) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.linear.lora_A['default_0'].weight, 140542617460768) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.linear.lora_B, 140533118739376) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.linear.lora_B['default_0'], 140533119536912) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.linear.base_layer, 140581769889056) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.linear.lora_dropout, 140533118742976) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.lora_dropout.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.linear.lora_dropout['default_0'], 140533118745040) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].norm1.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].norm1.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].norm1.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: 
___check_type_id(L['self'].transformer_blocks[10].norm1.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].norm1.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].norm1.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[10].norm1.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[10].norm1.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm2, accessed_by=DictGetItemGuardAccessor(norm2) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm2, 140581769890304) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm2.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context, 140581769890640) # context_ff_output = 
self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net, 140581769890784) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[10].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0], 
accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0], 140581769890736) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].proj, 140533118234400) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].ff_context.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_A, 140533118238624) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_A['default_0'], 140533118237232) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | 
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_A['default_0'].weight, 140537665009632) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_B, 140533118235888) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_B['default_0'], 140533118238240) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | 
| | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].proj.base_layer, 140581769890832) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_dropout, 140533118224992) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_dropout['default_0'], 140533118238384) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | 
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].ff_context.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].ff_context.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].ff_context.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].ff_context.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].ff_context.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].ff_context.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[10].ff_context.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: 
L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[1], accessed_by=GetItemGuardAccessor(1)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[1], 140581769890928) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2], accessed_by=GetItemGuardAccessor(2)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[2], 140533118233248) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].ff_context.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[2].lora_A, 140533118601152) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[2].lora_A['default_0'], 140533118609312) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[2].lora_A['default_0'].weight, 140537665004992) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[2].lora_B, 140533118606816) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[2].lora_B['default_0'], 140533118613968) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[2].base_layer, 140581769890976) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[2].lora_dropout, 140533118611376) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[2].lora_dropout['default_0'], 140533118604896) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].ff_context.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].ff_context.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].ff_context.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].ff_context.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].ff_context.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].ff_context.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[10].ff_context.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].ff_context.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
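
The guard cluster above on ff_context.net[2] (lora_A, lora_B, lora_dropout, scaling, use_dora, merged_adapters, _disable_adapters, _active_adapter) maps one-to-one onto the adapter dispatch in PEFT's LoRA Linear.forward, whose source lines are cited in each guard. A minimal sketch of that control flow, paraphrased from the cited peft/tuners/lora/layer.py:557-568 (exact code varies by PEFT version):

def lora_linear_forward(self, x, *args, **kwargs):
    # layer.py:557 -- run the frozen base nn.Linear first
    result = self.base_layer(x, *args, **kwargs)
    for active_adapter in self.active_adapters:
        if active_adapter not in self.lora_A.keys():   # layer.py:560
            continue
        lora_A = self.lora_A[active_adapter]           # layer.py:562
        lora_B = self.lora_B[active_adapter]           # layer.py:563
        dropout = self.lora_dropout[active_adapter]    # layer.py:564
        scaling = self.scaling[active_adapter]         # layer.py:565
        x = x.to(lora_A.weight.dtype)                  # layer.py:566
        if not self.use_dora[active_adapter]:          # layer.py:568
            result = result + lora_B(lora_A(dropout(x))) * scaling
    return result

Every attribute read on that path is exactly what Dynamo pins down: scaling['default_0'] == 1.0 is burned into the graph as a constant (hence the EQUALS_MATCH), while use_dora, merged_adapters and _disable_adapters only steer control flow, so identity and emptiness checks suffice for them.
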
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context, 140581769889152) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.norm, 140581769889344) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.silu, 140581769889248) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.linear, 140533119545120) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[10].norm1_context.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.linear.lora_A, 140533119536624) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.linear.lora_A['default_0'], 140533119539024) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.linear.lora_A['default_0'].weight, 140542617454208) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.linear.lora_B, 140533119538304) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.linear.lora_B['default_0'], 140533119542912) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.linear.base_layer, 140581769889296) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.linear.lora_dropout, 140533119535616) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.linear.lora_dropout['default_0'], 140533119546560) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].norm1_context.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].norm1_context.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[10].norm1_context.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].norm1_context.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[10].norm1_context.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[10].norm1_context.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[10].norm1_context.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm1_context.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
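
Each guard kind printed in this tree boils down to a cheap predicate evaluated against the frame's locals before a cached graph is reused. An illustrative Python rendering of the kinds seen in the norm1_context.linear cluster above (the ___check_* helpers are Dynamo-internal builtins; `linear` here is a hypothetical stand-in for the guarded module):

# ID_MATCH        -> ___check_obj_id(x, c), roughly id(x) == c   (same object)
# TYPE_MATCH      -> ___check_type_id(x, c), roughly id(type(x)) == c (exact type)
# EQUALS_MATCH    -> x == c, e.g. linear.scaling['default_0'] == 1.0
# DICT_LENGTH     -> len(d) == n, for guarded dicts like scaling / use_dora
# LENGTH_CHECK    -> len(seq) == n, or "not seq" for emptiness (merged_adapters)
# DICT_CONTAINS   -> 'forward' must stay absent from obj.__dict__ (no per-instance override)
# TENSOR_ALIASING -> two sources must be the very same object, checked with "is"
assert linear.scaling["default_0"] == 1.0  # what EQUALS_MATCH re-verifies on every call
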
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[10].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm2_context, 140581769890352) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[10].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[10]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=L['self'].transformer_blocks[11], accessed_by=GetItemGuardAccessor(11)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11], 140581769888624) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff, accessed_by=DictGetItemGuardAccessor(ff)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff, 140581769892272) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
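
The guards that follow walk transformer_blocks[11].ff, and their shape spells out diffusers' FeedForward layout: net has length 3, net[0] is the GELU wrapper whose proj is LoRA-wrapped and whose approximate flag is 'tanh', net[1] carries only a training guard (the dropout), and net[2] is the LoRA-wrapped output Linear. A sketch of the equivalent construction (dim and mult are assumptions, not values read from the log):

import torch.nn as nn
from diffusers.models.attention import FeedForward

# "gelu-approximate" selects the tanh GELU, matching the EQUALS_MATCH guard above
ff = FeedForward(dim=3072, mult=4, activation_fn="gelu-approximate")  # dims assumed
print([type(m).__name__ for m in ff.net])  # expected: ['GELU', 'Dropout', 'Linear']
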
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net, accessed_by=DictGetItemGuardAccessor(net)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net, 140581769892512) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[11].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0], accessed_by=GetItemGuardAccessor(0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0], 140581769892464) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].proj, 140533116804768) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].ff.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].proj.lora_A, 140533116801648) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].proj.lora_A['default_0'], 140533116876880) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].proj.lora_A['default_0'].weight, 140526664241360) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].proj.lora_B, 140533116804912) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].proj.lora_B['default_0'], 140533116877168) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].proj.base_layer, 140581769892560) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
**kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].proj.lora_dropout, 140533116813072) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].proj.lora_dropout['default_0'], 140533116801312) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].ff.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].ff.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].ff.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].ff.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].ff.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[11].ff.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].ff.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[11].ff.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].ff.net[0].approximate == 'tanh' # return F.gelu(gate, 
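
The guard kinds in the subtree above reduce to simple Python-level checks. A minimal sketch of their semantics (illustrative only, not Dynamo's generated code; `mod` and the `*_id` constants are hypothetical stand-ins for the ff.net[0].proj layer and the ids recorded at compile time):

def guards_still_valid(mod, proj_obj_id, dict_type_id):
    return (
        id(mod) == proj_obj_id                     # ID_MATCH: ___check_obj_id is an id() compare
        and id(type(mod.scaling)) == dict_type_id  # TYPE_MATCH: ___check_type_id compares id(type(...))
        and len(mod.scaling) == 1                  # DICT_LENGTH
        and mod.scaling["default_0"] == 1.0        # EQUALS_MATCH on the LoRA scale
        and not mod.merged_adapters                # LENGTH_CHECK: merged_adapters is empty
        and "forward" not in mod.__dict__          # DICT_CONTAINS: forward is not monkey-patched
    )

If any one of these checks fails on a later call, the cached graph is rejected and Dynamo recompiles.
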
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[1], 140581769892608) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[2], 140533116878608) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].ff.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[2].lora_A, 140533116877072) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[2].lora_A['default_0'], 140533116878416) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[2].lora_A['default_0'].weight, 140526664243120) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[2].lora_B, 140533116877216) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[2].lora_B['default_0'], 140533116875296) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[2].base_layer, 140581769892656) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[2].lora_dropout, 140533116876592) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[2].lora_dropout['default_0'], 140533116878704) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].ff.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].ff.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].ff.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].ff.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].ff.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].ff.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[11].ff.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
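
The ff.net[2] block above repeats the guard pattern already seen for ff.net[0].proj: one near-identical subtree per LoRA-wrapped linear, guarding exactly the attributes that peft's LoRA forward reads. A paraphrased sketch of that control flow (not the exact peft source; the layer.py line numbers are the ones cited in the guards above):

def lora_linear_forward(self, x, *args, **kwargs):
    result = self.base_layer(x, *args, **kwargs)     # layer.py:557
    for active_adapter in self.active_adapters:
        if active_adapter not in self.lora_A:        # layer.py:560
            continue
        lora_A = self.lora_A[active_adapter]         # layer.py:562
        lora_B = self.lora_B[active_adapter]         # layer.py:563
        dropout = self.lora_dropout[active_adapter]  # layer.py:564
        scaling = self.scaling[active_adapter]       # layer.py:565
        x = x.to(lora_A.weight.dtype)                # layer.py:566
        if not self.use_dora[active_adapter]:        # layer.py:568
            result = result + lora_B(lora_A(dropout(x))) * scaling
    return result

Every attribute read here (lora_A, lora_B, lora_dropout, scaling, use_dora, their 'default_0' entries, and each submodule's training flag) becomes one or more guards, which is why the tree grows by a near-identical block for every LoRA linear in every transformer block.
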
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn, 140581769891504) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_k, 140533118777472) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_k.lora_A, 140533118776848) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_k.lora_A['default_0'], 140533118668032) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_k.lora_A['default_0'].weight, 140542614636160) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_k.lora_B, 140533118775456) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_k.lora_B['default_0'], 140533118678448) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_k.base_layer, 140581769891648) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_k.lora_dropout, 140533118778528) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_k.lora_dropout['default_0'], 140533118775648) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[11].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_q, 140533118607968) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
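
The TENSOR_ALIASING entries above assert identity rather than equality: every PEFT layer shares a single _active_adapter object, so each layer only needs an `is` check against the anchor on transformer_blocks[0].norm1.linear. A guard dump like this one can be reproduced on any small module; the snippet below is a self-contained sketch (assumes a recent PyTorch 2.x where torch._logging.set_logs accepts guards=True, equivalent to TORCH_LOGS="guards"; TinyLora is illustrative, not the peft implementation):

import torch
import torch.nn as nn

torch._logging.set_logs(guards=True)  # print each compiled frame's guard tree

class TinyLora(nn.Module):
    def __init__(self):
        super().__init__()
        self.base = nn.Linear(8, 8)
        self.lora_A = nn.Linear(8, 4, bias=False)
        self.lora_B = nn.Linear(4, 8, bias=False)
        self.scaling = {"default_0": 1.0}

    def forward(self, x):
        # the dict reads below surface as TYPE_MATCH / DICT_LENGTH / EQUALS_MATCH guards
        return self.base(x) + self.lora_B(self.lora_A(x)) * self.scaling["default_0"]

compiled = torch.compile(TinyLora())
compiled(torch.randn(2, 8))  # first call traces, installs guards, and logs its TREE_GUARD_MANAGER
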
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_q.lora_A, 140533118773920) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_q.lora_A['default_0'], 140533118773344) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_q.lora_A['default_0'].weight, 140542614648800) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_q.lora_B, 140533118772768) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_q.lora_B['default_0'], 140533118769840) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_q.base_layer, 140581769891744) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_q.lora_dropout, 140533118776704) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_q.lora_dropout['default_0'], 140533118601488) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.use_dora['default_0'], 
accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[11].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].transformer_blocks[11].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_v, 140533118666928) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_v.lora_A, 140533118668224) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[11].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_v.lora_A['default_0'], 140533118669472) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_v.lora_A['default_0'].weight, 140542614636640) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_v.lora_B, 140533118667696) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_v.lora_B['default_0'], 140533118667936) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_v.base_layer, 140581769891840) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_v.lora_dropout, 140533118677776) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_v.lora_dropout['default_0'], 140533118667840) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] 
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[11].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[11].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.norm_k, 140581769891696) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.norm_k.weight, 140581765994416) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.norm_q, 140581769891600) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.norm_q.weight, 140581765994496) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out, 140581769892032) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[0], 140533118677968) # hidden_states = 
attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].attn.to_out[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[0].training, 140591004393408) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[0].lora_A, 140533118677296) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[0].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[0].lora_A['default_0'], 140533118668368) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | 
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[0].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[0].lora_A['default_0'].weight, 140526277608608) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[0].lora_B, 140533118678352) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[0].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[0].lora_B['default_0'], 140533118677728) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward 
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[0].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[0].base_layer, 140581769892080) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[0].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[0].lora_dropout, 140533118675472) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[0].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[0].lora_dropout['default_0'], 140533118676816) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[0].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.to_out[0].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].attn.to_out[0].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].attn.to_out[0].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.to_out[0].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # 
peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].attn.to_out[0].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[0].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.to_out[0].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[11].attn.to_out[0].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[0]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[0]._active_adapter, 
accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[1], accessed_by=GetItemGuardAccessor(1) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[1], 140581769892128) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_k_proj, 140533118680416) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].attn.add_k_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_k_proj.training, 140591004393408) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_k_proj.lora_A, 140533118677632) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_k_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_k_proj.lora_A['default_0'], 140533118665488) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_k_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[11].attn.add_k_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_k_proj.lora_A['default_0'].weight, 140526277608928) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_k_proj.lora_B, 140533118677872) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_k_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_k_proj.lora_B['default_0'], 140533118679408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_k_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_k_proj.base_layer, 140581769891888) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_k_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_k_proj.lora_dropout, 140533118665536) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_k_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_k_proj.lora_dropout['default_0'], 140533118676864) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[11].attn.add_k_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_k_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.add_k_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].attn.add_k_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].attn.add_k_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.add_k_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].attn.add_k_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_k_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.add_k_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[11].attn.add_k_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_k_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_k_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj, 
accessed_by=DictGetItemGuardAccessor(add_q_proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_q_proj, 140533118679648) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].attn.add_q_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_q_proj.training, 140591004393408) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_q_proj.lora_A, 140533118679984) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_q_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | 
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_q_proj.lora_A['default_0'], 140533118676000) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_q_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_q_proj.lora_A['default_0'].weight, 140526277618288) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_q_proj.lora_B, 140533118679264) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_q_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_q_proj.lora_B['default_0'], 140533118679504) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_q_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_q_proj.base_layer, 140581769891984) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_q_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_q_proj.lora_dropout, 140533118678064) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[11].attn.add_q_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_q_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_q_proj.lora_dropout['default_0'], 140533118679456) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_q_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.add_q_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].attn.add_q_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].attn.add_q_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.add_q_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].attn.add_q_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_q_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.add_q_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[11].attn.add_q_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_q_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj._backward_pre_hooks, 
accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_q_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_v_proj, 140533118678208) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].attn.add_v_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_v_proj.training, 140591004393408) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_v_proj.lora_A, 140533118679696) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 
in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_v_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_v_proj.lora_A['default_0'], 140533118677008) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_v_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_v_proj.lora_A['default_0'].weight, 140526277621168) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[11].attn.add_v_proj.lora_B, 140533118667360) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_v_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_v_proj.lora_B['default_0'], 140533118680032) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_v_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_v_proj.base_layer, 140581769891936) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_v_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_v_proj.lora_dropout, 140533118678640) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_v_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_v_proj.lora_dropout['default_0'], 140533118676480) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_v_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: 
___check_type_id(L['self'].transformer_blocks[11].attn.add_v_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].attn.add_v_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].attn.add_v_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.add_v_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].attn.add_v_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_v_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.add_v_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[11].attn.add_v_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.add_v_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.add_v_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_add_out, 140533118668512) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].attn.to_add_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_add_out.training, 140591004393408) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_add_out.lora_A, 140533116799776) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_add_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_add_out.lora_A['default_0'], 140533116806304) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_add_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_add_out.lora_A['default_0'].weight, 140526664241520) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_add_out.lora_B, 140533116799440) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_add_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_add_out.lora_B['default_0'], 140533116799872) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_add_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
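
The source comments on these guards all point into the same few lines of peft's LoRA forward. Paraphrased from the peft version being traced (peft/tuners/lora/layer.py:557-568, simplified to the non-DoRA path):

    result = self.base_layer(x, *args, **kwargs)
    for active_adapter in self.active_adapters:
        if active_adapter not in self.lora_A.keys():   # -> ID_MATCH on the lora_A ModuleDict
            continue
        lora_A = self.lora_A[active_adapter]           # -> ID_MATCH on lora_A['default_0']
        lora_B = self.lora_B[active_adapter]           # -> ID_MATCH on lora_B['default_0']
        dropout = self.lora_dropout[active_adapter]
        scaling = self.scaling[active_adapter]         # -> TYPE/DICT_LENGTH/EQUALS_MATCH on scaling
        x = x.to(lora_A.weight.dtype)                  # -> ID_MATCH on the weight object itself
        if not self.use_dora[active_adapter]:
            result = result + lora_B(lora_A(dropout(x))) * scaling

Each attribute access in that loop is what materializes one GuardManager node in this dump, which is why the same cluster reappears for every adapted Linear.
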
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_add_out.base_layer, 140581769892176) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_add_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_add_out.lora_dropout, 140533118674224) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_add_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_add_out.lora_dropout['default_0'], 140533118668704) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_add_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.to_add_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].attn.to_add_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].attn.to_add_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.to_add_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].attn.to_add_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_add_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
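
The block just emitted for to_add_out is essentially the same as the add_v_proj block before it, and it repeats for every LoRA-wrapped Linear in the model, so the size of this dump scales linearly with the number of adapted modules. A rough way to size that up front (a sketch; `transformer` is assumed to be the loaded FluxTransformer2DModel):

    from peft.tuners.lora.layer import LoraLayer

    def count_lora_layers(transformer):
        # Each LoraLayer contributes a block of roughly twenty guards of the
        # shape shown above (lora_A/lora_B/dropout/scaling/use_dora/bookkeeping).
        n = sum(isinstance(m, LoraLayer) for m in transformer.modules())
        print(f"{n} LoRA layers -> on the order of {20 * n} guards")
        return n
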
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.to_add_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[11].attn.to_add_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.to_add_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.to_add_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
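
The TENSOR_ALIASING guards record shared state: each layer's _active_adapter here is literally the same Python object as transformer_blocks[0].norm1.linear._active_adapter, so instead of re-checking its contents per layer, Dynamo only verifies identity with the first occurrence. The installed check is just an `is` comparison (sketch; `transformer` assumed as above):

    def check_shared_active_adapter(transformer):
        a = transformer.transformer_blocks[0].norm1.linear._active_adapter
        b = transformer.transformer_blocks[11].attn.to_add_out._active_adapter
        assert a is b  # identity, not equality -- exactly what the guard tests
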
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.norm_added_k, 140581769892320) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.norm_added_k.weight, 140581765994256) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.norm_added_q, 140581769892224) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.norm_added_q.weight, 140581765994336) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
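
norm_added_k and norm_added_q are diffusers RMSNorm modules, and their guards map directly onto the forward the comments point at. A simplified paraphrase of diffusers/src/diffusers/models/normalization.py around lines 424-431:

    variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)
    hidden_states = hidden_states * torch.rsqrt(variance + self.eps)  # eps is a Python float
                                                                      # -> EQUALS_MATCH eps == 1e-06
    if self.weight is not None:                                       # -> ID_MATCH on the weight object
        hidden_states = hidden_states.to(self.weight.dtype) * self.weight

The `if attn.norm_added_q is not None:` branches at attention_processor.py:1749/1751 are also why the norm modules themselves get ID_MATCH guards: the compiled graph is only valid while those attributes remain the same (in particular, non-None) objects.
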
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].attn.processor, 140581769891456) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
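
attn.heads is consumed as a Python int (head_dim = inner_dim // attn.heads), so the value 24 is burned into the compiled code and guarded with EQUALS_MATCH, while the processor is guarded by both type and identity because its __call__ signature was inspected during tracing. A small sketch of the specialization (the inner_dim value is an assumption about this model, not read from the log):

    inner_dim = 3072               # assumed width: 24 heads x 128 per head
    heads = 24                     # guarded: EQUALS_MATCH attn.heads == 24
    head_dim = inner_dim // heads  # plain int division -> baked into the graph
    assert head_dim == 128
    # Swapping the processor object afterwards, e.g. attn.set_processor(...),
    # breaks the ID_MATCH on attn.processor and forces a recompile.
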
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1, accessed_by=DictGetItemGuardAccessor(norm1)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1, 140581769891024) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.norm, 140581769891168) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.silu, 140581769891072) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.linear, 140533118613440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].norm1.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
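
norm1 is an AdaLayerNormZero-style block: the guards on emb, silu, linear, and norm track exactly the attributes its forward touches. A simplified paraphrase of diffusers/src/diffusers/models/normalization.py:135-139 (the chunk into six tensors is what produces the values unpacked at transformer_flux.py:165):

    emb = self.linear(self.silu(emb))
    shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = emb.chunk(6, dim=1)
    x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]
    return x, gate_msa, shift_mlp, scale_mlp, gate_mlp

Because self.linear is itself LoRA-wrapped, the familiar peft guard cluster starts again directly below.
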
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.linear.lora_A, 140533118615264) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.linear.lora_A['default_0'], 140533118614016) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.linear.lora_A['default_0'].weight, 140537665004512) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.linear.lora_B, 140533118606912) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.linear.lora_B['default_0'], 140533118614064) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.linear.base_layer, 140581769891120) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.linear.lora_dropout, 140533118611424) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.linear.lora_dropout['default_0'], 140533118602352) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
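
A detail visible across this whole dump: `training` flags are guarded with ID_MATCH rather than EQUALS_MATCH because True and False are singletons, so an id() comparison is the cheapest possible check. Two distinct ids recur (…393440 on base modules, …393408 on the LoRA submodules), i.e. the two bool values; which id is which cannot be read off the log, but flipping any flag via model.train() or model.eval() invalidates the affected guards. The underlying fact, as a sketch:

    flag = True
    assert id(flag) == id(True)   # bools are singletons; ___check_obj_id exploits this
    assert id(True) != id(False)  # hence the two distinct ids recurring in this dump
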
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].norm1.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].norm1.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].norm1.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].norm1.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].norm1.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].norm1.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[11].norm1.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm2, accessed_by=DictGetItemGuardAccessor(norm2)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm2, 140581769892368) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm2.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context, 140581769892704) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net, 140581769892848) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[11].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0], 140581769892800) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].proj, 140533116873520) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
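
ff_context is a diffusers FeedForward whose forward simply iterates self.net, so Dynamo pins the container as well: TYPE_MATCH/LENGTH_CHECK assert that net is still a ModuleList of exactly three entries, and each element gets an ID_MATCH as the loop is unrolled. The rough shape being guarded (a simplified paraphrase, not the exact source):

    # diffusers FeedForward, roughly:
    #   net[0]: GELU projection -- holds the LoRA-wrapped `proj` Linear guarded below
    #   net[1]: nn.Dropout
    #   net[2]: nn.Linear
    for module in self.net:          # attention.py:1200 -> LENGTH_CHECK len(net) == 3
        hidden_states = module(hidden_states)

Because net[0].proj is LoRA-wrapped, the same peft lora_A/lora_B guard cluster seen earlier begins once more below.
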
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].proj, 140533116873520) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].ff_context.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_A, 140533116872272) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_A['default_0'], 140533116863824) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_A['default_0'].weight, 140526664244320) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_B, 140533116871024) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
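The adapter weights themselves are guarded by ID_MATCH, i.e. by object identity rather than by value. Practically, that means updating a LoRA weight in place keeps this compiled graph valid, while rebinding the attribute to a fresh Parameter changes the object id and forces a recompile. A two-line illustration; layer and new_w are hypothetical:

    import torch.nn as nn
    layer.lora_A["default_0"].weight.data.copy_(new_w)      # same object id: guard still passes
    layer.lora_A["default_0"].weight = nn.Parameter(new_w)  # new object id: ID_MATCH fails, recompile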
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_B['default_0'], 140533116875680) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].proj.base_layer, 140581769892896) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_dropout, 140533116876208) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_dropout['default_0'], 140533116876352) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].ff_context.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].ff_context.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].ff_context.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].ff_context.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].ff_context.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].ff_context.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[11].ff_context.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
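Every source line quoted in this guard cluster comes from the same short branch of peft's LoRA Linear.forward (layer.py:557-568 in the log). A self-contained stand-in for that branch, written as a sketch rather than the real peft class (the final update line is the standard LoRA formulation, assumed from context):

    import torch
    import torch.nn as nn

    class LoraLinearSketch(nn.Module):
        def __init__(self, base: nn.Linear, r: int = 16, scaling: float = 1.0, adapter: str = "default_0"):
            super().__init__()
            self.base_layer = base
            self.lora_A = nn.ModuleDict({adapter: nn.Linear(base.in_features, r, bias=False)})
            self.lora_B = nn.ModuleDict({adapter: nn.Linear(r, base.out_features, bias=False)})
            self.lora_dropout = nn.ModuleDict({adapter: nn.Identity()})
            self.scaling = {adapter: scaling}   # dict guarded by TYPE_MATCH/DICT_LENGTH/EQUALS_MATCH above
            self.use_dora = {adapter: False}    # guarded the same way; False takes the plain LoRA path
            self.active_adapters = [adapter]

        def forward(self, x):
            result = self.base_layer(x)                       # layer.py:557
            for active_adapter in self.active_adapters:
                if active_adapter not in self.lora_A.keys():  # layer.py:560
                    continue
                lora_A = self.lora_A[active_adapter]          # layer.py:562
                lora_B = self.lora_B[active_adapter]          # layer.py:563
                dropout = self.lora_dropout[active_adapter]   # layer.py:564
                scaling = self.scaling[active_adapter]        # layer.py:565
                x = x.to(lora_A.weight.dtype)                 # layer.py:566
                if not self.use_dora[active_adapter]:         # layer.py:568
                    result = result + lora_B(lora_A(dropout(x))) * scaling
            return result

Dynamo traces this Python and then guards every attribute it touched, which is why each of the many LoRA-wrapped linears in the model contributes its own lora_A/lora_B/scaling/use_dora cluster to the tree.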
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[1], accessed_by=GetItemGuardAccessor(1)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[1], 140581769892992) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2], accessed_by=GetItemGuardAccessor(2)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[2], 140533116872416) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].ff_context.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[2].lora_A, 140533116876400) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[2].lora_A['default_0'], 140533116877552) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[2].lora_A['default_0'].weight, 140526562430944) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[2].lora_B, 140533116874576) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[2].lora_B['default_0'], 140533116864064) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[2].base_layer, 140581769893040) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[2].lora_dropout, 140533116868240) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[2].lora_dropout['default_0'], 140533116871696) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].ff_context.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].ff_context.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].ff_context.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].ff_context.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].ff_context.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
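The EQUALS_MATCH on scaling['default_0'] == 1.0 is the one value guard in each LoRA cluster: peft computes scaling as lora_alpha / r, so this adapter was loaded with alpha equal to rank. Changing the effective LoRA scale later invalidates exactly this guard and recompiles; for example (the adapter-weight call below is the diffusers API as I understand it, treated as an assumption):

    pipe.set_adapters(["default_0"], adapter_weights=[0.5])  # scaling 1.0 -> 0.5: EQUALS_MATCH fails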
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].ff_context.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[11].ff_context.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].ff_context.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
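Despite the name, TENSOR_ALIASING here checks object aliasing rather than tensor values: peft injects the same _active_adapter object into every wrapped layer, so Dynamo anchors one representative (transformer_blocks[0].norm1.linear) and asserts identity against it everywhere else instead of re-guarding the contents each time. What the guard asserts, written as plain Python with a hypothetical pipe handle:

    blocks = pipe.transformer.transformer_blocks
    assert blocks[0].norm1.linear._active_adapter is blocks[11].ff_context.net[2]._active_adapter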
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context, 140581769891216) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.norm, 140581769891408) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.silu, 140581769891312) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.linear, 140533118613536) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[11].norm1_context.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
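The norm1_context guards trace the adaptive layer norm of the context stream: the ID_MATCH on emb (140591004478624 is plausibly the None singleton, matching the guarded "if self.emb is not None" branch) and the silu -> linear -> norm chain at normalization.py:135-139. A hedged sketch of that path; the 6-way chunk layout is an assumption inferred from the five conditioning outputs unpacked at transformer_flux.py:167:

    import torch
    import torch.nn as nn

    class AdaLayerNormZeroSketch(nn.Module):
        def __init__(self, dim: int):
            super().__init__()
            self.emb = None                       # guarded: the 'if self.emb is not None' branch is dead
            self.silu = nn.SiLU()
            self.linear = nn.Linear(dim, 6 * dim)
            self.norm = nn.LayerNorm(dim, elementwise_affine=False, eps=1e-6)

        def forward(self, x, emb):
            emb = self.linear(self.silu(emb))     # normalization.py:137
            shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = emb.chunk(6, dim=1)
            x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # normalization.py:139
            return x, gate_msa, shift_mlp, scale_mlp, gate_mlp

Note that linear here is itself LoRA-wrapped in the model, which is why the guards below descend into its lora_A/lora_B sub-tree just as they did for the feed-forward projections.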
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.linear.lora_A, 140533118614256) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.linear.lora_A['default_0'], 140533118602976) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[11].norm1_context.linear.lora_A['default_0'].weight, 140542614642720) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.linear.lora_B, 140533118610800) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.linear.lora_B['default_0'], 140533118613248) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.linear.base_layer, 140581769891360) # result = self.base_layer(x, *args, **kwargs) # 
peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.linear.lora_dropout, 140533118608736) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.linear.lora_dropout['default_0'], 140533118612240) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].norm1_context.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].norm1_context.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[11].norm1_context.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].norm1_context.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[11].norm1_context.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[11].norm1_context.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[11].norm1_context.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm1_context.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[11].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
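Every leaf above compiles down to a tiny check against a value recorded at trace time. As a reading aid, here is a rough Python rendering of the guard kinds that dominate this dump; it sketches the semantics implied by the log, not the actual implementation (the real checks are compiled GuardManager nodes installed by torch/_dynamo/guards.py). The two ids that recur on `.training` attributes, 140591004393440 and 140591004393408, are almost certainly the CPython bool singletons, since an ID_MATCH on a bool pins the singleton object itself.

```python
# Illustrative only: rough Python equivalents of the guard primitives above.

def id_match(obj, expected_id):             # ID_MATCH / ___check_obj_id
    return id(obj) == expected_id           # same CPython object as at trace time

def type_match(obj, expected_type_id):      # TYPE_MATCH / ___check_type_id
    return id(type(obj)) == expected_type_id  # exact type; a subclass fails

def equals_match(value, expected):          # EQUALS_MATCH
    return value == expected                # e.g. scaling['default_0'] == 1.0

def dict_length(d, expected_len):           # DICT_LENGTH
    return len(d) == expected_len           # e.g. exactly one adapter registered

def length_check_empty(container):          # LENGTH_CHECK in its "not x" form
    return not container                    # e.g. merged_adapters must stay empty

def dict_not_contains(key, d):              # the negated DICT_CONTAINS form
    return key not in d                     # e.g. no per-instance 'forward' override
```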
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm2_context, 140581769892416) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[11].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[11]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
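The guard sources for every LoRA-wrapped Linear in this dump point into peft/tuners/lora/layer.py:557-568. Stitching the quoted lines back together gives the forward pass Dynamo specialized; the loop header and the final update line are paraphrased and should be read as a sketch, not the verbatim PEFT source:

```python
# Condensed from the source lines quoted in the guards (peft/tuners/lora/layer.py);
# everything not carrying a ':NNN' comment is paraphrased.
def forward(self, x, *args, **kwargs):
    result = self.base_layer(x, *args, **kwargs)       # :557  frozen base Linear
    for active_adapter in self.active_adapters:        # here just 'default_0'
        if active_adapter not in self.lora_A.keys():   # :560  lora_A dict is ID_MATCHed
            continue
        lora_A = self.lora_A[active_adapter]           # :562
        lora_B = self.lora_B[active_adapter]           # :563
        dropout = self.lora_dropout[active_adapter]    # :564
        scaling = self.scaling[active_adapter]         # :565  EQUALS_MATCH == 1.0
        x = x.to(lora_A.weight.dtype)                  # :566  weight pinned by ID_MATCH
        if not self.use_dora[active_adapter]:          # :568  guarded to stay falsy
            result = result + lora_B(lora_A(dropout(x))) * scaling
    return result
```

This is why a single-adapter setup produces such a large (but individually cheap) guard set: every adapter-keyed dictionary lookup becomes an ID_MATCH or TYPE_MATCH, and mutating any of the guarded values at runtime, for example rescaling the adapter or loading a second one, invalidates the compiled graph and forces a recompile.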
| | | | | +- GuardManager: source=L['self'].transformer_blocks[12], accessed_by=GetItemGuardAccessor(12)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12], 140581769890688) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff, accessed_by=DictGetItemGuardAccessor(ff)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff, 140581769894336) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net, 140581769894576) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[12].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
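The container guards just above pin ff.net to a three-element module list iterated at attention.py:1200, and the guards that follow reveal its elements: net[0] carries a LoRA-wrapped `proj` plus `approximate == 'tanh'` (the diffusers GELU wrapper quoted at activations.py:83-88), net[1] gets only a training guard (consistent with a parameterless module such as nn.Dropout, an assumption here), and net[2] is another LoRA-wrapped Linear. A minimal sketch of that layout, assuming the standard diffusers FeedForward shape:

```python
import torch.nn as nn
import torch.nn.functional as F

class GELU(nn.Module):
    # Mirrors the lines quoted in the guards (diffusers activations.py:83-88).
    def __init__(self, dim_in: int, dim_out: int, approximate: str = "tanh"):
        super().__init__()
        self.proj = nn.Linear(dim_in, dim_out)  # LoRA-wrapped in the guarded model
        self.approximate = approximate          # guarded: EQUALS_MATCH == 'tanh'

    def forward(self, hidden_states):
        hidden_states = self.proj(hidden_states)                    # :88
        return F.gelu(hidden_states, approximate=self.approximate)  # :83

class FeedForward(nn.Module):
    # len(net) == 3, as asserted by the LENGTH_CHECK guard above.
    def __init__(self, dim: int, inner_dim: int, dropout: float = 0.0):
        super().__init__()
        self.net = nn.ModuleList([
            GELU(dim, inner_dim),       # net[0]
            nn.Dropout(dropout),        # net[1] -- assumed; only a training guard shows
            nn.Linear(inner_dim, dim),  # net[2] -- LoRA-wrapped in the guarded model
        ])

    def forward(self, hidden_states):
        for module in self.net:  # the loop guarded at attention.py:1200
            hidden_states = module(hidden_states)
        return hidden_states
```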
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0], 140581769894528) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].proj, 140533118131008) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].ff.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].proj.lora_A, 140533118131296) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].proj.lora_A['default_0'], 140533118125296) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].proj.lora_A['default_0'].weight, 140526771805760) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].proj.lora_B, 140533118133936) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].proj.lora_B['default_0'], 140533118134176) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].proj.base_layer, 140581769894624) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].proj.lora_dropout, 140533118130144) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].proj.lora_dropout['default_0'], 140533118138976) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].ff.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].ff.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].ff.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].ff.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].ff.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].ff.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[12].ff.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
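Despite the TENSOR_ALIASING label, the aliasing guards in this dump are identity checks on the `_active_adapter` attribute: every LoRA layer is expected to hold literally the same object as transformer_blocks[0].norm1.linear. A sketch of what the guard asserts, where `model` is a hypothetical stand-in name for the compiled module:

```python
# 'model' is a hypothetical handle to the compiled Flux transformer.
a = model.transformer_blocks[0].norm1.linear._active_adapter
b = model.transformer_blocks[12].ff.net[0].proj._active_adapter
assert a is b  # identity, not equality: rebinding either attribute to an
               # equal-but-distinct object would invalidate the compiled graph
```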
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[1], 140581769894672) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[2], 140533118124960) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].ff.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[2].lora_A, 140533118137728) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[2].lora_A['default_0'], 140533117015456) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[2].lora_A['default_0'].weight, 140526771803280) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[2].lora_B, 140533118138064) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[2].lora_B['default_0'], 140533117011808) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[2].base_layer, 140581769894720) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[2].lora_dropout, 140533118131728) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[2].lora_dropout['default_0'], 140533118137200) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].ff.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].ff.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].ff.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].ff.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].ff.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].ff.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[12].ff.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
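Three PEFT properties are quoted over and over in the guard comments (peft/tuners/tuners_utils.py:506, 511, 516). Reconstructed from those quoted return statements, with the class name taken from PEFT's BaseTunerLayer and everything beyond the quoted lines assumed:

```python
class BaseTunerLayer:
    @property
    def merged(self) -> bool:
        return bool(self.merged_adapters)  # :506 -- LENGTH_CHECK keeps this falsy

    @property
    def disable_adapters(self) -> bool:
        return self._disable_adapters      # :511 -- ID_MATCH pins the bool singleton

    @property
    def active_adapter(self):
        return self._active_adapter        # :516 -- aliased across every layer
```

So merging an adapter into the base weights, toggling disable_adapters, or switching the active adapter each flips one of these guarded values, which fails the guard set and recompiles the whole frame.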
___check_obj_id(L['self'].transformer_blocks[12].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_k, 140533117588368) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_k.lora_A, 140533117598448) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_k.lora_A.training, 
140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_k.lora_A['default_0'], 140533117594464) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_k.lora_A['default_0'].weight, 140526562441264) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_k.lora_B, 140533117593456) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[12].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_k.lora_B['default_0'], 140533117590000) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_k.base_layer, 140581769893712) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_k.lora_dropout, 140533117586592) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] 
[0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_k.lora_dropout['default_0'], 140533117586688) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 
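The EQUALS_MATCH that closes the to_k bundle specializes the compiled graph to a concrete Python float, scaling['default_0'] == 1.0. In peft that value is derived from the adapter config, normally lora_alpha / r (or lora_alpha / sqrt(r) with rslora), so a checkpoint whose lora_alpha equals its rank lands exactly on this guard. Illustrative arithmetic only, with assumed config values:

r, lora_alpha = 16, 16
scaling = lora_alpha / r      # 1.0, the constant EQUALS_MATCH pinned above
# use_rslora=True would give lora_alpha / math.sqrt(r) instead.

Because the scale is baked in as a constant, changing the adapter strength at runtime does not take effect silently: it fails this guard and forces a recompile of the frame.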
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[12].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q, 140533117594656) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q.lora_A, 140533117595616) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 
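The TENSOR_ALIASING records above (printed twice per layer, as the duplicate lines in this dump show) assert object identity rather than value equality: every LoRA module in the model shares a single _active_adapter object, and each layer's guard ties it back to the copy first seen on transformer_blocks[0].norm1.linear. Spelled out, with model as a stand-in for the module bound to L['self']:

anchor = model.transformer_blocks[0].norm1.linear._active_adapter
assert anchor is model.transformer_blocks[12].attn.to_k._active_adapter  # the guard condition

Identity is the contract here: mutating the shared object in place preserves it, while rebinding the attribute on any single layer would break the guard.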
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q.lora_A['default_0'], 140533117588032) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q.lora_A['default_0'].weight, 140526562438384) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q.lora_B, 140533117594416) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q.lora_B['default_0'], 140533117595088) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q.base_layer, 140581769893808) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 
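Each ID_MATCH above compiles to an id() comparison (___check_obj_id) against the object captured at trace time, so for entries like lora_A['default_0'].weight it is the Parameter's identity that is guarded, never its values. A small self-contained illustration of the distinction:

import torch
import torch.nn as nn

lin = nn.Linear(4, 4)
orig_weight = lin.weight                      # the object an ID_MATCH would capture
with torch.no_grad():
    lin.weight.copy_(torch.randn(4, 4))       # in-place update: same object, guard still passes
assert lin.weight is orig_weight
lin.weight = nn.Parameter(torch.randn(4, 4))  # rebinding: new object, ID_MATCH would now fail
assert lin.weight is not orig_weight

This is why loading new adapter weights into existing modules in place keeps a compiled model warm, while re-wrapping or re-creating the adapter modules forces recompilation.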
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q.lora_dropout, 140533117593072) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q.lora_dropout['default_0'], 140533117595760) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].attn.to_q.scaling) == 1 # scaling = 
self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[12].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) 
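As with ff.net[2] and to_k earlier, the to_q records specialize on adapter state: the LENGTH_CHECK asserts merged_adapters is empty and the ID_MATCH just below pins _disable_adapters, i.e. the graph assumes live, unmerged adapters. Any transition out of that state invalidates the guards. A hedged sketch using peft's PeftModel-level API (the diffusers LoRA helpers, such as fuse_lora on the pipeline, drive the same underlying state):

peft_model.merge_adapter()           # merged_adapters becomes non-empty -> LENGTH_CHECK fails
peft_model.unmerge_adapter()         # restores the guarded state
with peft_model.disable_adapter():   # _disable_adapters flips -> ID_MATCH fails
    out = peft_model(batch)          # runs without LoRA, recompiling this frame

Merging the adapter before compiling avoids both the per-layer LoRA guards and the extra matmuls, at the cost of runtime adapter switching.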
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_v, 140533117589280) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[12].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_v.lora_A, 140533117587408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_v.lora_A['default_0'], 140533117592976) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[12].attn.to_v.lora_A['default_0'].weight, 140526562439024) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_v.lora_B, 140533117593024) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_v.lora_B['default_0'], 140533117590624) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_v.base_layer, 140581769893904) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | 
| +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_v.lora_dropout, 140533117593936) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_v.lora_dropout['default_0'], 140533117589424) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: 
___check_type_id(L['self'].transformer_blocks[12].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[12].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.norm_k, 140581769893760) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 
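With to_v finished, the dump moves on to the qk-norm modules. The same bundle of a few dozen guard records has now repeated for to_q, to_k, and to_v, and it continues below for to_out and every other LoRA target across all transformer blocks, which is why a single Flux forward produces thousands of guard entries; the [0/3] tag on each record marks this as the fourth compiled version of frame 0. Some torch knobs for working with dumps like this (the APIs exist as written; defaults and output format vary by release):

import torch

# Reproduce this dump: TORCH_LOGS="guards" in the environment, or
torch._logging.set_logs(guards=True)

# Cap how many recompiled versions of one frame Dynamo keeps before
# falling back to eager for that code object:
torch._dynamo.config.cache_size_limit = 8

# Aggregate compile statistics accumulated so far:
from torch._dynamo.utils import counters
print(dict(counters["stats"]))   # keys such as 'calls_captured', 'unique_graphs'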
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.norm_k.weight, 140581765995376) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.norm_q, 140581769893664) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[12].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.norm_q.weight, 140581765995696) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out, 
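The EQUALS_MATCH on eps and the ID_MATCH on weight for norm_q/norm_k come from diffusers' RMSNorm forward; the guard comments quote normalization.py lines 428 and 430. A minimal sketch of that forward follows (only the two quoted lines are taken from the source; the surrounding boilerplate is assumed):

    import torch
    from torch import nn

    class RMSNormSketch(nn.Module):
        def __init__(self, dim, eps=1e-6, elementwise_affine=True):
            super().__init__()
            self.eps = eps  # pinned by the EQUALS_MATCH guard (== 1e-06)
            self.weight = nn.Parameter(torch.ones(dim)) if elementwise_affine else None

        def forward(self, hidden_states):
            input_dtype = hidden_states.dtype
            variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)
            # normalization.py:428 -- the line quoted by the eps guard
            hidden_states = hidden_states * torch.rsqrt(variance + self.eps)
            # normalization.py:430 -- the branch that makes Dynamo guard .weight
            if self.weight is not None:
                hidden_states = hidden_states * self.weight
            return hidden_states.to(input_dtype)

Because eps is read as a plain Python float at trace time, it is burned into the compiled graph and checked with EQUALS_MATCH rather than becoming a tensor input.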
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out, 140581769894096) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[0], 140533116935072) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].attn.to_out[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[0].training, 140591004393408) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[0].lora_A, 140533116933920) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[0].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[0].lora_A['default_0'], 140533116942272) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[0].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[0].lora_A['default_0'].weight, 140526699345888) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[0].lora_B, 140533116933584) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[0].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[0].lora_B['default_0'], 140533116935312) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[0].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[0].base_layer, 140581769894144) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[0].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[0].lora_dropout, 140533116929600) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[0].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[0].lora_dropout['default_0'], 140533116934208) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[0].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.to_out[0].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].attn.to_out[0].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].attn.to_out[0].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.to_out[0].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].attn.to_out[0].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[0].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.to_out[0].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[12].attn.to_out[0].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[0]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[0]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[1], 140581769894192) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
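to_out[0] is a PEFT LoRA-wrapped Linear, and every guard under it maps onto one attribute read in the forward at peft/tuners/lora/layer.py (lines 557-568 in the quoted comments). A condensed sketch of that forward, with the disable-adapters and merged branches elided:

    def forward(self, x, *args, **kwargs):
        # Condensed from the peft/tuners/lora/layer.py lines quoted by the guards.
        result = self.base_layer(x, *args, **kwargs)        # layer.py:557 (ID_MATCH on base_layer)
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():    # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]            # layer.py:562
            lora_B = self.lora_B[active_adapter]            # layer.py:563
            dropout = self.lora_dropout[active_adapter]     # layer.py:564
            scaling = self.scaling[active_adapter]          # layer.py:565 (EQUALS_MATCH: == 1.0)
            x = x.to(lora_A.weight.dtype)                   # layer.py:566 (ID_MATCH on the weight)
            if not self.use_dora[active_adapter]:           # layer.py:568 (use_dora['default_0'] is False here)
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Each dict lookup keyed on the adapter name is why every wrapped Linear contributes its own lora_A/lora_B/lora_dropout/scaling/use_dora guard cluster; with a single 'default_0' adapter, scaling 1.0, and DoRA off, the traced branch effectively reduces to result = base_layer(x) + lora_B(lora_A(x)).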
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_k_proj, 140533119772192) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].attn.add_k_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_k_proj.training, 140591004393408) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_k_proj.lora_A, 140533118786896) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_k_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_k_proj.lora_A['default_0'], 140533118789440) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_k_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_k_proj.lora_A['default_0'].weight, 140526654898096) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_k_proj.lora_B, 140533118792032) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_k_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_k_proj.lora_B['default_0'], 140533118793424) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_k_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_k_proj.base_layer, 140581769893952) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_k_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_k_proj.lora_dropout, 140533118793472) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_k_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_k_proj.lora_dropout['default_0'], 140533118785216) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_k_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.add_k_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].attn.add_k_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].attn.add_k_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.add_k_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].attn.add_k_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_k_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.add_k_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[12].attn.add_k_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_k_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_k_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
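The merged_adapters / _disable_adapters / _active_adapter trio recurs under every wrapped layer because Dynamo traces through three small BaseTunerLayer properties (tuners_utils.py:506, 511 and 516 in the quoted comments). A sketch built only from those quoted return lines, with everything else assumed:

    class BaseTunerLayerSketch:
        merged_adapters: list    # empty here: hence TYPE_MATCH on list plus LENGTH_CHECK "not ..."
        _disable_adapters: bool  # False here: ID_MATCH against the interned bool singleton
        _active_adapter: list    # one object shared by every wrapped layer in the model

        @property
        def merged(self):
            return bool(self.merged_adapters)  # tuners_utils.py:506

        @property
        def disable_adapters(self):
            return self._disable_adapters      # tuners_utils.py:511

        @property
        def active_adapter(self):
            return self._active_adapter        # tuners_utils.py:516

The paired TENSOR_ALIASING guards record that each layer's _active_adapter is the same object as transformer_blocks[0].norm1.linear's, so adapter selection is checked once by identity rather than re-validated by value at every layer.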
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_q_proj, 140533116940448) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].attn.add_q_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_q_proj.training, 140591004393408) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_q_proj.lora_A, 140533116934256) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_q_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_q_proj.lora_A['default_0'], 140533116931280) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_q_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_q_proj.lora_A['default_0'].weight, 140526654887056) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_q_proj.lora_B, 140533116934112) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_q_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_q_proj.lora_B['default_0'], 140533116940640) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_q_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_q_proj.base_layer, 140581769894048) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_q_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_q_proj.lora_dropout, 140533116931616) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_q_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_q_proj.lora_dropout['default_0'], 140533116940400) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_q_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.add_q_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].attn.add_q_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].attn.add_q_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.add_q_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].attn.add_q_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_q_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.add_q_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[12].attn.add_q_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_q_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_q_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].attn.add_v_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_v_proj.training, 140591004393408) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_v_proj.lora_A, 140533117383488) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_v_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_v_proj.lora_A['default_0'], 140533116940832) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[12].attn.add_v_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_v_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_v_proj.lora_A['default_0'].weight, 140526654892976) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_v_proj.lora_B, 140533117379696) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_v_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_v_proj.lora_B['default_0'], 140533116938576) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_v_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_v_proj.base_layer, 140581769894000) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_v_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_v_proj.lora_dropout, 140533117372016) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_v_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[12].attn.add_v_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_v_proj.lora_dropout['default_0'], 140533117371440) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_v_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.add_v_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].attn.add_v_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].attn.add_v_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.add_v_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].attn.add_v_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_v_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.add_v_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[12].attn.add_v_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.add_v_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.add_v_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.add_v_proj._active_adapter # return 
self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_add_out, 140533116943088) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].attn.to_add_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_add_out.training, 140591004393408) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_add_out.lora_A, 140533116939968) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | 
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_add_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_add_out.lora_A['default_0'], 140533118134368) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_add_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_add_out.lora_A['default_0'].weight, 140526699346608) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_add_out.lora_B, 140533116940016) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) 
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_add_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_add_out.lora_B['default_0'], 140533118138256) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_add_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_add_out.base_layer, 140581769894240) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_add_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[12].attn.to_add_out.lora_dropout, 140533116943280) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_add_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_add_out.lora_dropout['default_0'], 140533116943376) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_add_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.to_add_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].attn.to_add_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.scaling['default_0'], 
accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].attn.to_add_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.to_add_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].attn.to_add_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.to_add_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.to_add_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[12].attn.to_add_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[12].attn.to_add_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.to_add_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.norm_added_k, 140581769894384) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_k.eps, 
accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.norm_added_k.weight, 140581773230944) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.norm_added_q, 140581769894288) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_q.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.norm_added_q.weight, 140581785355984) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].attn.processor, 140581769893520) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1, accessed_by=DictGetItemGuardAccessor(norm1) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1, 140581769893088) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.norm, 140581769893232) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.silu, 140581769893136) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[12].norm1.linear, 140533116874960) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].norm1.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.linear.lora_A, 140533116862960) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.linear.lora_A['default_0'], 140533117584144) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.linear.lora_A['default_0'].weight, 140526562441744) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.linear.lora_B, 140533116874816) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.linear.lora_B['default_0'], 140533117594752) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.linear.base_layer, 140581769893184) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.linear.lora_dropout, 140533116873952) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.linear.lora_dropout['default_0'], 140533116871216) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].norm1.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].norm1.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].norm1.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].norm1.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].norm1.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].norm1.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[12].norm1.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
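[editor's note] The guard cluster above pins down every attribute that PEFT's LoRA Linear.forward touches on its hot path. Pieced together from the source lines the guards themselves quote (peft/tuners/lora/layer.py:557-568), the specialized control flow is roughly the sketch below; this is a paraphrase for orientation, not the library's exact code:

    # Rough paraphrase of peft/tuners/lora/layer.py:557-568 (lora.Linear.forward),
    # reconstructed from the source lines quoted in the guards above.
    def forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)      # guarded: base_layer identity + training flag
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():  # guarded: lora_A ModuleDict identity
                continue
            lora_A = self.lora_A[active_adapter]          # guarded: lora_A['default_0'] identity
            lora_B = self.lora_B[active_adapter]          # guarded: lora_B['default_0'] identity
            dropout = self.lora_dropout[active_adapter]   # guarded: lora_dropout['default_0'] identity
            scaling = self.scaling[active_adapter]        # guarded: EQUALS_MATCH scaling == 1.0
            x = x.to(lora_A.weight.dtype)                 # guarded: lora_A weight object identity
            if not self.use_dora[active_adapter]:         # guarded: use_dora['default_0'] is False
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Because scaling['default_0'] is guarded with EQUALS_MATCH against the literal 1.0, changing the effective LoRA scale at call time rewrites self.scaling, fails this guard, and forces a recompile of the whole frame.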
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm2, accessed_by=DictGetItemGuardAccessor(norm2)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm2, 140581769894432) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm2.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward
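[editor's note] norm1 here is diffusers' adaptive layer norm, whose guarded hot line is emb = self.linear(self.silu(emb)) (normalization.py:137), while norm2 is a plain LayerNorm called with only hidden_states. A minimal sketch of that modulation pattern, under the assumption that it chunks the projected embedding into shift/scale/gate terms the way diffusers' AdaLayerNormZero does (class and dims below are placeholders, not the library's exact code):

    import torch.nn as nn

    class AdaLayerNormZeroSketch(nn.Module):
        # Sketch only: SiLU -> Linear produces 6 modulation tensors; the LayerNorm
        # output is shifted/scaled by two of them, the rest are returned as gates.
        def __init__(self, dim):
            super().__init__()
            self.silu = nn.SiLU()
            self.linear = nn.Linear(dim, 6 * dim)
            self.norm = nn.LayerNorm(dim, elementwise_affine=False, eps=1e-6)

        def forward(self, x, emb):
            emb = self.linear(self.silu(emb))  # the line the guards above quote
            shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = emb.chunk(6, dim=1)
            x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]
            return x, gate_msa, shift_mlp, scale_mlp, gate_mlp

Note that norm1.linear above is itself LoRA-wrapped, which is why the adaptive-norm guards nest a full set of PEFT guards.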
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context, 140581769894768) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net, accessed_by=DictGetItemGuardAccessor(net)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net, 140581769894912) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[12].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0], 140581769894864) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].proj, 140533117014736) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].ff_context.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_A, 140533117300224) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_A['default_0'], 140533117290864) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_A['default_0'].weight, 140526778740432) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_B, 140533117291488) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_B['default_0'], 140533117299600) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].proj.base_layer, 140581769894960) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_dropout, 140533117296720) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_dropout['default_0'], 140533117299648) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].ff_context.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].ff_context.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].ff_context.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].ff_context.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].ff_context.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
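[editor's note] A readable mental model for the guard kinds that dominate this dump, written out as plain Python predicates (a sketch of what the guard manager asserts, not Dynamo's actual C++ implementation):

    # What each guard kind roughly asserts, as plain Python. Constants such as
    # 140533117300224 are id() values captured when the frame was compiled.
    dict_type = dict  # stand-in; the real TYPE_MATCH compares id(type(...)) to a captured constant
    guards = [
        lambda L: id(L['self'].transformer_blocks[12].ff_context.net[0].proj.lora_A) == 140533117300224,  # ID_MATCH
        lambda L: type(L['self'].transformer_blocks[12].ff_context.net[0].proj.scaling) is dict_type,     # TYPE_MATCH
        lambda L: len(L['self'].transformer_blocks[12].ff_context.net[0].proj.scaling) == 1,              # DICT_LENGTH
        lambda L: L['self'].transformer_blocks[12].ff_context.net[0].proj.scaling['default_0'] == 1.0,    # EQUALS_MATCH
        lambda L: 'forward' not in L['self'].transformer_blocks[12].ff_context.net[0].proj.__dict__,      # DICT_CONTAINS (negated)
        lambda L: not L['self'].transformer_blocks[12].ff_context.net[0].proj.merged_adapters,            # LENGTH_CHECK on an empty list
    ]
    # The compiled graph runs only if every predicate holds; otherwise Dynamo recompiles.

Anything that flips one of these, such as merging an adapter, loading a second adapter so the per-layer dicts grow past length 1, toggling use_dora, or rescaling the LoRA weights, invalidates this cache entry.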
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].ff_context.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[12].ff_context.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[1], 140581769895056) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
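[editor's note] The three guarded entries of ff_context.net reconstruct this block's feed-forward layout: net[0] is a GELU projection guarded with approximate == 'tanh', net[1] carries only a training-flag guard (plausibly a dropout), and net[2] is the output Linear, here LoRA-wrapped. A minimal sketch consistent with the guarded loop `for module in self.net:` (attention.py:1200); class names and dims below are placeholders, not diffusers' exact code:

    import torch.nn as nn
    import torch.nn.functional as F

    class GELUProj(nn.Module):
        # Sketch of a GELU activation module: Linear followed by tanh-approximated GELU.
        def __init__(self, dim_in, dim_out, approximate='tanh'):
            super().__init__()
            self.proj = nn.Linear(dim_in, dim_out)
            self.approximate = approximate  # guarded above with EQUALS_MATCH == 'tanh'

        def forward(self, hidden_states):
            hidden_states = self.proj(hidden_states)
            return F.gelu(hidden_states, approximate=self.approximate)

    class FeedForwardSketch(nn.Module):
        def __init__(self, dim, inner_dim, dropout=0.0):
            super().__init__()
            # Exactly 3 entries, matching the LENGTH_CHECK len(net) == 3 above.
            self.net = nn.ModuleList([GELUProj(dim, inner_dim), nn.Dropout(dropout), nn.Linear(inner_dim, dim)])

        def forward(self, hidden_states):
            for module in self.net:  # the guarded source line
                hidden_states = module(hidden_states)
            return hidden_states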
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[2], 140533117300608) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].ff_context.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[2].lora_A, 140533117297872) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[2].lora_A['default_0'], 140533117304208) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[2].lora_A['default_0'].weight, 140526778739792) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[2].lora_B, 140533117299168) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[2].lora_B['default_0'], 140533117298544) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[2].base_layer, 140581769895104) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[2].lora_dropout, 140533117291632) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[2].lora_dropout['default_0'], 140533117300752) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].ff_context.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].ff_context.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].ff_context.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].ff_context.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].ff_context.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].ff_context.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[12].ff_context.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].ff_context.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1_context, 140581769893280) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- 
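The guard cluster above pins every attribute the LoRA dispatch in peft/tuners/lora/layer.py touches (lines 557-568, quoted in the guard comments): base_layer, lora_A/lora_B, lora_dropout, scaling, use_dora, merged_adapters, _disable_adapters and _active_adapter each get their own sub-guard. A minimal sketch of that forward path, reconstructed only from the statements those comments quote; the class name, rank, and nn.Identity dropout are illustrative assumptions, not peft's actual code:

    import torch.nn as nn

    class LoraLinearSketch(nn.Module):
        """Hypothetical stand-in for the guarded peft Linear wrapper."""
        def __init__(self, base_layer: nn.Linear, r: int = 16):
            super().__init__()
            self.base_layer = base_layer                      # ID_MATCH guard
            self.lora_A = nn.ModuleDict({"default_0": nn.Linear(base_layer.in_features, r, bias=False)})
            self.lora_B = nn.ModuleDict({"default_0": nn.Linear(r, base_layer.out_features, bias=False)})
            self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})  # assumed: dropout == 0
            self.scaling = {"default_0": 1.0}                 # EQUALS_MATCH == 1.0
            self.use_dora = {"default_0": False}              # ID_MATCH against False
            self.merged_adapters = []                         # LENGTH_CHECK: must stay empty
            self._disable_adapters = False                    # ID_MATCH against False
            self._active_adapter = ["default_0"]              # one shared object -> TENSOR_ALIASING

        def forward(self, x):
            result = self.base_layer(x)                       # layer.py:557
            for active_adapter in self._active_adapter:       # tuners_utils.py:516
                if active_adapter not in self.lora_A.keys():  # layer.py:560
                    continue
                lora_A = self.lora_A[active_adapter]          # layer.py:562
                lora_B = self.lora_B[active_adapter]          # layer.py:563
                dropout = self.lora_dropout[active_adapter]   # layer.py:564
                scaling = self.scaling[active_adapter]        # layer.py:565
                x = x.to(lora_A.weight.dtype)                 # layer.py:566
                if not self.use_dora[active_adapter]:         # layer.py:568
                    result = result + lora_B(lora_A(dropout(x))) * scaling
            return result

Because every branch in this path reads plain Python attributes and dict entries, Dynamo must pin each of them (ID_MATCH, TYPE_MATCH, DICT_LENGTH, EQUALS_MATCH) for the compiled graph to stay valid, which is why the tree repeats this block for every adapted Linear in the model.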
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1_context.norm, 140581769893472) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1_context.silu, 140581769893376) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1_context.linear, 140533117590576) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[12].norm1_context.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1_context.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1_context.linear.lora_A, 140533117587120) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1_context.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1_context.linear.lora_A['default_0'], 140533117593696) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1_context.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1_context.linear.lora_A['default_0'].weight, 140526562431664) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1_context.linear.lora_B, 140533117586640) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1_context.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1_context.linear.lora_B['default_0'], 140533117590048) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1_context.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1_context.linear.base_layer, 140581769893424) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1_context.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1_context.linear.lora_dropout, 140533117593552) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
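The norm1_context guards walk an AdaLayerNormZero-style module: SiLU plus a (LoRA-wrapped) Linear produce the modulation vector, and a LayerNorm output is scaled and shifted by it, per the three statements quoted from diffusers/src/diffusers/models/normalization.py (135, 137, 139). A sketch under exactly those quoted statements; the 6-way chunk, hidden size, and class name are assumptions:

    import torch.nn as nn

    class AdaLayerNormZeroSketch(nn.Module):
        def __init__(self, dim: int = 3072):
            super().__init__()
            self.emb = None                              # guarded: "if self.emb is not None"
            self.silu = nn.SiLU()
            self.linear = nn.Linear(dim, 6 * dim)        # LoRA-wrapped in the traced model
            self.norm = nn.LayerNorm(dim, elementwise_affine=False, eps=1e-6)

        def forward(self, x, emb):
            # self.emb stays None here, so the normalization.py:135 branch falls through
            emb = self.linear(self.silu(emb))            # normalization.py:137
            shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = emb.chunk(6, dim=1)
            x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # normalization.py:139
            return x, gate_msa, shift_mlp, scale_mlp, gate_mlp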
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1_context.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1_context.linear.lora_dropout['default_0'], 140533117588704) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1_context.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].norm1_context.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].norm1_context.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[12].norm1_context.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].norm1_context.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[12].norm1_context.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1_context.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[12].norm1_context.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[12].norm1_context.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm1_context.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[12].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm2_context, 140581769894480) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[12].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward
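The paired TENSOR_ALIASING guards above assert that this module's _active_adapter is the very same Python object as transformer_blocks[0].norm1.linear._active_adapter, so its contents only need to be checked once for the whole model. A toy illustration of the property being guarded (not peft code; the container type here is an assumption):

    shared = ["default_0"]                  # one adapter container created once

    class Adapted:
        def __init__(self, active):
            self._active_adapter = active   # every wrapper keeps a reference, not a copy

    mods = [Adapted(shared) for _ in range(3)]
    # Dynamo's aliasing guard reduces to an identity test like this one:
    assert mods[0]._active_adapter is mods[2]._active_adapter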
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[12]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=L['self'].transformer_blocks[13], accessed_by=GetItemGuardAccessor(13)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13], 140581769892752) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff, accessed_by=DictGetItemGuardAccessor(ff)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff, 140581769896400) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net, accessed_by=DictGetItemGuardAccessor(net)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net, 140581769896640) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[13].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0], accessed_by=GetItemGuardAccessor(0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[0], 140581769896592) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[0].proj, 140533117444368) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].ff.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[0].proj.lora_A, 140533117449408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[0].proj.lora_A['default_0'], 140533117444320) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[0].proj.lora_A['default_0'].weight, 140526687131888) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
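The ff guards for block 13 describe diffusers' FeedForward as a 3-slot container iterated in a plain Python loop (attention.py:1200; a TYPE_MATCH on the container plus LENGTH_CHECK len == 3), so each slot is guarded individually: net[0] holds a projection plus GELU (sketched after the next guard group), net[1] is apparently a stateless module such as Dropout (only a .training guard appears for it), and net[2] is the LoRA-wrapped output Linear. A sketch of that layout; the ModuleList type, sizes, and Dropout are assumptions read off the guards:

    import torch.nn as nn

    class FeedForwardSketch(nn.Module):
        def __init__(self, dim: int = 3072, mult: int = 4):
            super().__init__()
            self.net = nn.ModuleList([                     # LENGTH_CHECK: len == 3
                nn.Sequential(nn.Linear(dim, dim * mult),  # stand-in for net[0] (proj + gelu)
                              nn.GELU(approximate="tanh")),
                nn.Dropout(0.0),                           # net[1]: only a .training guard
                nn.Linear(dim * mult, dim),                # net[2]: LoRA-wrapped in the trace
            ])

        def forward(self, hidden_states):
            for module in self.net:                        # attention.py:1200
                hidden_states = module(hidden_states)
            return hidden_states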
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[0].proj.lora_B, 140533117450992) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[0].proj.lora_B['default_0'], 140533117443792) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[0].proj.base_layer, 140581769896688) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[0].proj.lora_dropout, 140533117451328) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[0].proj.lora_dropout['default_0'], 140533117443648) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].ff.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].ff.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].ff.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].ff.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].ff.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].ff.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[13].ff.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
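net[0]'s guards bottom out in an EQUALS_MATCH on approximate == 'tanh', matching the two statements quoted from diffusers/src/diffusers/models/activations.py (88: the projection, 83: the gelu call). A sketch of such a module built from exactly those two quoted statements; the class name and dimensions are otherwise assumptions:

    import torch.nn as nn
    import torch.nn.functional as F

    class GELUSketch(nn.Module):
        def __init__(self, dim_in: int, dim_out: int, approximate: str = "tanh"):
            super().__init__()
            self.proj = nn.Linear(dim_in, dim_out)   # LoRA-wrapped in the traced model
            self.approximate = approximate           # guarded: EQUALS_MATCH == 'tanh'

        def gelu(self, gate):
            return F.gelu(gate, approximate=self.approximate)   # activations.py:83

        def forward(self, hidden_states):
            hidden_states = self.proj(hidden_states)            # activations.py:88
            return self.gelu(hidden_states)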
source=L['self'].transformer_blocks[13].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[1], 140581769896736) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[2], 140533117436208) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].ff.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] 
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[2].lora_A, 140533117447680) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[2].lora_A['default_0'], 140533117442208) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[2].lora_A['default_0'].weight, 140531261946784) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[2].lora_B, 140533117450272) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[2].lora_B['default_0'], 140533117443456) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] 
[0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[2].base_layer, 140581769896784) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[2].lora_dropout, 140533117451376) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[2].lora_dropout['default_0'], 140533117440576) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) 
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].ff.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].ff.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].ff.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].ff.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].ff.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2]._backward_hooks, 
accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].ff.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[13].ff.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff._forward_pre_hooks, 
accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn, 140581769895632) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_k, 140533117182320) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_k.lora_A, 140533117181120) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_k.lora_A['default_0'], 140533117188752) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_k.lora_A['default_0'].weight, 140526661499328) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_k.lora_B, 140533117186352) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_k.lora_B['default_0'], 140533117182464) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | 
| | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_k.base_layer, 140581769895776) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_k.lora_dropout, 140533117189616) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_k.lora_dropout['default_0'], 140533117182272) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[13].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_q, 140533117736208) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_q.lora_A, 140533117742352) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_q.lora_A['default_0'], 140533117176656) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_q.lora_A['default_0'].weight, 140526778735072) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_q.lora_B, 140533117743552) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_q.lora_B['default_0'], 140533117181984) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) 
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_q.base_layer, 140581769895872) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_q.lora_dropout, 140533117736928) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_q.lora_dropout['default_0'], 140533117732128) # dropout = 
self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_q.use_dora['default_0'], 
140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[13].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[13].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_v, 140533117178960) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_v.lora_A, 140533117187600) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[13].attn.to_v.lora_A['default_0'], 140533117336016) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_v.lora_A['default_0'].weight, 140526661498608) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_v.lora_B, 140533117326944) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_v.lora_B['default_0'], 140533117335248) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_v.base_layer, 140581769895968) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_v.lora_dropout, 140533117181408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | 
| | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_v.lora_dropout['default_0'], 140533117189040) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[13].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | 
| | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.norm_k, 140581769895824) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.norm_k.weight, 140581765997456) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 
in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.norm_q, 140581769895728) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.norm_q.weight, 140581765997536) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out, 140581769896160) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[0], 140533117324736) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[13].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].attn.to_out[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[0].training, 140591004393408) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[0].lora_A, 140533117710784) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[0].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[0].lora_A['default_0'], 140533117712464) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] 
[0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[0].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[0].lora_A['default_0'].weight, 140537319871872) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[0].lora_B, 140533117712752) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[0].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[0].lora_B['default_0'], 140533117703440) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].lora_B['default_0'].__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[0].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[0].base_layer, 140581769896208) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[0].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[0].lora_dropout, 140533117335056) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[0].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | 
| | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[0].lora_dropout['default_0'], 140533117336304) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[0].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.to_out[0].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].attn.to_out[0].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].attn.to_out[0].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.to_out[0].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].attn.to_out[0].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 
in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[0].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.to_out[0].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[13].attn.to_out[0].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[0]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[0]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].transformer_blocks[13].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[1], accessed_by=GetItemGuardAccessor(1) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[1], 140581769896256) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_k_proj, 140533117336352) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].attn.add_k_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_k_proj.training, 140591004393408) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # 
diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_k_proj.lora_A, 140533117335392) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_k_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_k_proj.lora_A['default_0'], 140533117324784) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_k_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[13].attn.add_k_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_k_proj.lora_A['default_0'].weight, 140526661491088) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_k_proj.lora_B, 140533117335344) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_k_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_k_proj.lora_B['default_0'], 140533117337456) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_k_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_k_proj.base_layer, 140581769896016) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_k_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_k_proj.lora_dropout, 140533117333136) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_k_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_k_proj.lora_dropout['default_0'], 140533117336400) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_k_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.add_k_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].attn.add_k_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].attn.add_k_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.add_k_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].attn.add_k_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_k_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.add_k_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[13].attn.add_k_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_k_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_k_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
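The add_k_proj block above is one instance of a pattern that repeats for every PEFT-wrapped Linear in the model: Dynamo installs one guard per Python value that peft/tuners/lora/layer.py reads between lines 557 and 568. For orientation, here is a condensed sketch of that code path, reconstructed from the source lines the guards cite; the loop over active adapters and the final update line are paraphrased from PEFT for illustration and are not themselves in this log:

    # Sketch of peft/tuners/lora/layer.py:557-568 as cited by the guards above.
    # Simplified; the real Linear.forward also handles merged weights and DoRA.
    def forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)      # layer.py:557 -> base_layer guards
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():  # layer.py:560 -> lora_A ID_MATCH
                continue
            lora_A = self.lora_A[active_adapter]          # layer.py:562
            lora_B = self.lora_B[active_adapter]          # layer.py:563
            dropout = self.lora_dropout[active_adapter]   # layer.py:564
            scaling = self.scaling[active_adapter]        # layer.py:565 -> EQUALS_MATCH == 1.0
            x = x.to(lora_A.weight.dtype)                 # layer.py:566 -> weight ID_MATCH
            if not self.use_dora[active_adapter]:         # layer.py:568 -> use_dora guards
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Each attribute access becomes its own GuardManager node, and the leaf guards pin exactly what the compiled code assumed: ID_MATCH pins the identity of a module or parameter object, TYPE_MATCH plus DICT_LENGTH pin the per-adapter dicts, and EQUALS_MATCH pins the scaling float. The ID_MATCH guards on the various .training flags (140591004393408 versus 140591004393440 throughout this dump) are identity checks on what are presumably the two bool singletons, which is why an object-id comparison suffices to pin a boolean.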
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_q_proj, 140533117334864) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].attn.add_q_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_q_proj.training, 140591004393408) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_q_proj.lora_A, 140533117325504) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_q_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_q_proj.lora_A['default_0'], 140533117332176) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_q_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_q_proj.lora_A['default_0'].weight, 140533132574320) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_q_proj.lora_B, 140533117323872) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_q_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_q_proj.lora_B['default_0'], 140533117333712) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_q_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_q_proj.base_layer, 140581769896112) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_q_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_q_proj.lora_dropout, 140533117326800) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_q_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_q_proj.lora_dropout['default_0'], 140533117332656) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_q_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.add_q_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].attn.add_q_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].attn.add_q_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.add_q_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].attn.add_q_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_q_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.add_q_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[13].attn.add_q_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_q_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_q_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
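Note the pair of TENSOR_ALIASING guards that closes each of these module blocks (the duplication is present in the log itself). PEFT stores the active-adapter list once and shares it across layers, so rather than re-guarding its contents per layer, Dynamo only asserts that each layer's _active_adapter is the very same object as the first occurrence it saw, on transformer_blocks[0].norm1.linear. A quick way to confirm the aliasing; `transformer` is an assumed handle to the LoRA-injected module this log traces:

    # Assumed handle: `transformer` is the compiled Flux transformer with LoRA loaded.
    a = transformer.transformer_blocks[0].norm1.linear._active_adapter
    b = transformer.transformer_blocks[13].attn.add_q_proj._active_adapter
    assert a is b  # one shared list -> one cheap identity guard per layer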
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_v_proj, 140533117334240) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].attn.add_v_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_v_proj.training, 140591004393408) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_v_proj.lora_A, 140533117331696) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_v_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_v_proj.lora_A['default_0'], 140533117330976) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_v_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_v_proj.lora_A['default_0'].weight, 140526661489888) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_v_proj.lora_B, 140533117323248) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_v_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_v_proj.lora_B['default_0'], 140533117333424) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_v_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_v_proj.base_layer, 140581769896064) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_v_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_v_proj.lora_dropout, 140533117322960) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_v_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_v_proj.lora_dropout['default_0'], 140533117324640) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_v_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.add_v_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].attn.add_v_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].attn.add_v_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.add_v_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].attn.add_v_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_v_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.add_v_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[13].attn.add_v_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.add_v_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.add_v_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
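Every scaling['default_0'] guard in this dump is an EQUALS_MATCH against 1.0, which makes the compiled graph valid only for that exact LoRA strength. In PEFT, scaling is lora_alpha / r (or lora_alpha / sqrt(r) when rslora is enabled), so 1.0 simply says alpha equals r for this adapter; the actual pair cannot be recovered from the log. The practical consequence: anything that rewrites self.scaling at runtime, such as applying a lora_scale other than 1.0 through scale_lora_layers, fails this guard on the next call and forces a recompile. A worked example under assumed values:

    r, lora_alpha = 16, 16     # assumed for illustration; any pair with alpha == r matches the log
    scaling = lora_alpha / r   # 1.0 -> EQUALS_MATCH ... == 1.0 holds
    scaling *= 0.8             # e.g. a runtime lora_scale of 0.8
    # 0.8 != 1.0 -> the EQUALS_MATCH guard fails -> Dynamo recompiles this frame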
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_add_out, 140533117106544) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].attn.to_add_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_add_out.training, 140591004393408) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_add_out.lora_A, 140533117097856) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_add_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_add_out.lora_A['default_0'], 140533117449600) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_add_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_add_out.lora_A['default_0'].weight, 140526687135408) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_add_out.lora_B, 140533117098912) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_add_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_add_out.lora_B['default_0'], 140533117442256) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_add_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_add_out.base_layer, 140581769896304) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_add_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_add_out.lora_dropout, 140533117105056) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_add_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_add_out.lora_dropout['default_0'], 140533117098000) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_add_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.to_add_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].attn.to_add_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].attn.to_add_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.to_add_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].attn.to_add_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_add_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.to_add_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[13].attn.to_add_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_add_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
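Each wrapped module also carries a negative guard, DICT_CONTAINS: not ___dict_contains('forward', ...__dict__). nn.Module._call_impl dispatches through self.forward, and an instance attribute named forward would shadow the class method and change what Dynamo traced, so the guard asserts that no such monkey-patch has appeared since compilation. An illustration with an assumed module handle:

    proj = transformer.transformer_blocks[13].attn.to_add_out  # assumed handle to the traced model
    assert "forward" not in proj.__dict__  # the state the DICT_CONTAINS guard pins
    proj.forward = lambda x: x             # instance-level override for demonstration
    # 'forward' is now in proj.__dict__, so the guard fails and the
    # next call through torch.compile retraces this frame.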
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.to_add_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[13].attn.to_add_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.to_add_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.to_add_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.norm_added_k, 140581769896448) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[13].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.norm_added_k.weight, 140581765997296) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.norm_added_q, 140581769896352) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.norm_added_q.weight, 140581765997376) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].attn.processor, 140581769895584) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1, accessed_by=DictGetItemGuardAccessor(norm1) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1, 140581769895152) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[13].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.norm, 140581769895296) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.silu, 140581769895200) # emb = 
self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.linear, 140533117291344) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].norm1.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.linear.lora_A, 140533117292304) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.linear.lora_A['default_0'], 140533117298112) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.linear.lora_A['default_0'].weight, 140526778731312) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.linear.lora_B, 140533117290912) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[13].norm1.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.linear.lora_B['default_0'], 140533117033232) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.linear.base_layer, 140581769895248) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | 
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.linear.lora_dropout, 140533117288608) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.linear.lora_dropout['default_0'], 140533117289568) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].norm1.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].norm1.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in 
forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].norm1.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].norm1.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].norm1.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].norm1.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[13].norm1.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm2, accessed_by=DictGetItemGuardAccessor(norm2) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm2, 140581769896496) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | 
| +- GuardManager: source=L['self'].transformer_blocks[13].norm2.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context, 140581769896832) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net, 140581769896976) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[13].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] 
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0], 140581769896928) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].proj, 140533117442400) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not 
___dict_contains('forward', L['self'].transformer_blocks[13].ff_context.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_A, 140533117514224) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_A['default_0'], 140533117507648) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_A['default_0'].weight, 140531261937024) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_B, 140533117513360) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_B['default_0'], 140533117510576) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].proj.base_layer, 140581769897024) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_dropout, 140533117504576) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_dropout['default_0'], 140533117508080) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].ff_context.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].ff_context.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].ff_context.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | 
| | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].ff_context.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].ff_context.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].ff_context.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[13].ff_context.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[13].ff_context.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[1], 140581769897120) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[2], 140533117508560) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].ff_context.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[2].lora_A, 140533117504144) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[2].lora_A['default_0'], 140533117517728) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[2].lora_A['default_0'].weight, 140526666835472) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[2].lora_B, 140533117508032) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[13].ff_context.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[2].lora_B['default_0'], 140533117502704) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[2].base_layer, 140581769897168) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) 
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[2].lora_dropout, 140533117507408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[2].lora_dropout['default_0'], 140533117504768) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].ff_context.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].ff_context.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].ff_context.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].ff_context.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].ff_context.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].ff_context.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[13].ff_context.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2]._disable_adapters, 
accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].ff_context.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context, 140581769895344) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | 
+- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.norm, 140581769895536) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | 
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.silu, 140581769895440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.linear, 140533117036592) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[13].norm1_context.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.linear.lora_A, 140533117040912) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.linear.lora_A['default_0'], 140533117033808) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.linear.lora_A['default_0'].weight, 140526778741632) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[13].norm1_context.linear.lora_B, 140533117039808) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.linear.lora_B['default_0'], 140533117036400) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.linear.base_layer, 140581769895488) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.linear.lora_dropout, 140533117038800) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.linear.lora_dropout['default_0'], 140533117036496) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].norm1_context.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].norm1_context.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[13].norm1_context.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].norm1_context.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[13].norm1_context.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[13].norm1_context.linear.merged_adapters, 140591004458752) # 
return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[13].norm1_context.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm1_context.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[13].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[13].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm2_context, 140581769896544) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[13].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[13]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=L['self'].transformer_blocks[14], accessed_by=GetItemGuardAccessor(14) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14], 140581769894816) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff, accessed_by=DictGetItemGuardAccessor(ff) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff, 140581769898464) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net, 140581769898704) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[14].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0], 140581769898656) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].proj, 140533116412752) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].ff.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 
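The TYPE_MATCH and LENGTH_CHECK entries on ff.net above exist because Dynamo unrolls the `for module in self.net:` loop from attention.py:1200, so it must pin both the container's type and its length (three entries in diffusers' FeedForward: the GELU projection wrapper, a dropout, and the output linear). A self-contained sketch of the same pattern; TinyFF is illustrative, not the diffusers class:

    import torch
    from torch import nn

    class TinyFF(nn.Module):                # stand-in for diffusers' FeedForward
        def __init__(self):
            super().__init__()
            self.net = nn.ModuleList(
                [nn.Linear(8, 32), nn.GELU(), nn.Linear(32, 8)]
            )

        def forward(self, x):
            for module in self.net:         # mirrors attention.py:1200
                x = module(x)
            return x

    ff = torch.compile(TinyFF())
    ff(torch.randn(2, 8))                   # guards: type(self.net), len(self.net) == 3

Appending or removing a module in net after compilation would fail the LENGTH_CHECK and force a recompile.
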
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].proj.lora_A, 140533116410448) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].proj.lora_A['default_0'], 140533116777616) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].proj.lora_A['default_0'].weight, 140526768783120) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].proj.lora_B, 140533116775312) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].proj.lora_B['default_0'], 140533116766960) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.lora_B['default_0'].training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].proj.base_layer, 140581769898752) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].proj.lora_dropout, 140533116416832) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 
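Every boolean-valued attribute in this dump (.training here, and use_dora['default_0'] and _disable_adapters below) is guarded with ID_MATCH against one of just two object ids, 140591004393408 and 140591004393440, which is consistent with the CPython True/False singletons: identity is a sound way to pin a bool. Illustration only; the concrete ids are process-specific:

    flag = False
    assert id(flag) == id(False) and id(flag) != id(True)   # what ___check_obj_id relies on

Calling .train() or .eval() on any of these modules flips the identity seen by the guard and invalidates it.
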
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].proj.lora_dropout['default_0'], 140533116405696) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].ff.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].ff.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].ff.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].ff.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].ff.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | 
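The EQUALS_MATCH on proj.scaling['default_0'] == 1.0 just above bakes the LoRA scale into the graph as a Python constant. In PEFT the per-adapter scale defaults to lora_alpha / r, so a value of 1.0 suggests (but does not prove) alpha == r for this adapter. A sketch with assumed numbers, not read from this trace:

    lora_alpha, r = 16, 16       # assumed adapter config
    scaling = lora_alpha / r     # PEFT's default LoRA scale
    assert scaling == 1.0        # the constant EQUALS_MATCH pins

Changing the adapter's scale after compilation would fail this guard and trigger a recompile.
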
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].ff.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[14].ff.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].ff.net[0].proj._active_adapter # return 
self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[1], 140581769898800) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
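The EQUALS_MATCH on net[0].approximate == 'tanh' above comes from diffusers' GELU wrapper passing a string flag through to F.gelu (activations.py:83), so the compiled graph is specialized to the tanh approximation. The underlying call, runnable as-is:

    import torch
    import torch.nn.functional as F

    gate = torch.randn(2, 32)
    out = F.gelu(gate, approximate="tanh")   # the call this guard specializes on
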
[__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[2], 140533116161520) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].ff.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[2].lora_A, 140533117717520) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[2].lora_A['default_0'], 140533115777808) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
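The recurring DICT_CONTAINS guards assert that no instance-level 'forward' shadows the class method, because nn.Module._call_impl (module.py:1556, cited in the log) resolves self.forward through the instance dict first. A minimal demonstration of the state being guarded and what would break it:

    from torch import nn

    m = nn.Linear(4, 4)
    assert "forward" not in m.__dict__   # the state DICT_CONTAINS asserts
    m.forward = lambda x: x              # an override like this would flip the
    assert "forward" in m.__dict__       # check and force a recompile
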
[__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[2].lora_A['default_0'].weight, 140526768770800) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[2].lora_B, 140533117729472) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[2].lora_B['default_0'], 140533115778192) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[2].base_layer, 140581769898848) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[2].lora_dropout, 140533117718816) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in 
forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[2].lora_dropout['default_0'], 140533117718960) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].ff.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].ff.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].ff.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].ff.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].ff.net[2].use_dora) == 1 # if not 
self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].ff.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[14].ff.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].transformer_blocks[14].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn, 140581769897696) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:45:31.053000 140590996850496 
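Taken together, the guards above on merged_adapters (empty), _disable_adapters (falsy), use_dora['default_0'] (falsy), and scaling (1.0) pin the plain unmerged-LoRA branch of PEFT's Linear.forward. A hedged paraphrase of that branch, with illustrative shapes and names rather than a verbatim copy of peft/tuners/lora/layer.py:

    import torch
    from torch import nn

    base = nn.Linear(8, 8)                        # base_layer
    lora_A = nn.Linear(8, 4, bias=False)          # down-projection
    lora_B = nn.Linear(4, 8, bias=False)          # up-projection
    dropout, scaling = nn.Identity(), 1.0         # eval-mode dropout; guarded scale

    x = torch.randn(2, 8)
    result = base(x) + lora_B(lora_A(dropout(x))) * scaling   # the guarded fast path

Merging the adapter (or disabling it) would change one of the guarded attributes and deoptimize this graph.
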
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_k, 140533115899760) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_k.lora_A, 140533115909696) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_k.lora_A['default_0'], 140533117674176) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_k.lora_A['default_0'].weight, 140526696540064) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_k.lora_B, 140533115899376) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_k.lora_B['default_0'], 140533117671392) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_k.base_layer, 140581769897840) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_k.lora_dropout, 140533115899856) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 
in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_k.lora_dropout['default_0'], 140533115899712) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].attn.to_k.use_dora) == 1 # if not 
self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[14].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.to_k._active_adapter # return 
self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_q, 140533117501648) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_q.lora_A, 140533116987392) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_q.lora_A.training, 140591004393408) # if 
active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_q.lora_A['default_0'], 140533115898128) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_q.lora_A['default_0'].weight, 140526666827872) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_q.lora_B, 140533116992480) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_q.lora_B.training, 
140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_q.lora_B['default_0'], 140533115899088) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_q.base_layer, 140581769897936) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_q.lora_dropout, 140533116986480) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[14].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_q.lora_dropout['default_0'], 140533116979952) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | 
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[14].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[14].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_v, 140533117669568) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_v.lora_A, 140533117672928) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_v.lora_A['default_0'], 140533117669280) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_v.lora_A['default_0'].weight, 140526696540304) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_v.lora_B, 140533117672784) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_v.lora_B['default_0'], 140533117671344) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_v.base_layer, 140581769898032) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_v.lora_dropout, 140533117668224) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_v.lora_dropout['default_0'], 140533117670480) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # 
peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[14].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.norm_k, 140581769897888) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[14].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.norm_k.weight, 140581765999456) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.norm_q, 140581769897792) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_q.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.norm_q.weight, 140581772710976) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out, 140581769898224) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] 
[0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[0], 140533116409104) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].attn.to_out[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[0].training, 140591004393408) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[0].lora_A, 140533116409584) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[0].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[0].lora_A['default_0'], 140533116417360) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[0].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[0].lora_A['default_0'].weight, 140526768769440) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[0].lora_B, 140533116418560) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[0].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[0].lora_B['default_0'], 140533116414384) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[0].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[0].base_layer, 140581769898272) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[0].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[0].lora_dropout, 140533116415920) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[0].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[0].lora_dropout['default_0'], 140533116408720) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[0].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.to_out[0].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].attn.to_out[0].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].attn.to_out[0].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.to_out[0].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].attn.to_out[0].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[0].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.to_out[0].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[14].attn.to_out[0].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[0]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[0]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[1], accessed_by=GetItemGuardAccessor(1)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[1], 140581769898320) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_k_proj, 140533117671200) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].attn.add_k_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_k_proj.training, 140591004393408) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_k_proj.lora_A, 140533117668512) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_k_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_k_proj.lora_A['default_0'], 140533117680656) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_k_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_k_proj.lora_A['default_0'].weight, 140526696537424) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_k_proj.lora_B, 140533117670336) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_k_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_k_proj.lora_B['default_0'], 140533117669376) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_k_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_k_proj.base_layer, 140581769898080) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_k_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_k_proj.lora_dropout, 140533117671152) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_k_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_k_proj.lora_dropout['default_0'], 140533117667264) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_k_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.add_k_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].attn.add_k_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].attn.add_k_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.add_k_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].attn.add_k_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_k_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.add_k_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[14].attn.add_k_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_k_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_k_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_q_proj, 140533116410112) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].attn.add_q_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_q_proj.training, 140591004393408) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_q_proj.lora_A, 140533116406704) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_q_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_q_proj.lora_A['default_0'], 140533116414528) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_q_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_q_proj.lora_A['default_0'].weight, 140526696537344) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_q_proj.lora_B, 140533116419808) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_q_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_q_proj.lora_B['default_0'], 140533116419712) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_q_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_q_proj.base_layer, 140581769898176) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_q_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_q_proj.lora_dropout, 140533116406176) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_q_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_q_proj.lora_dropout['default_0'], 140533116408192) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_q_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.add_q_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].attn.add_q_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].attn.add_q_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.add_q_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].attn.add_q_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_q_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.add_q_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[14].attn.add_q_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_q_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_q_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_v_proj, 140533117670768) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].attn.add_v_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_v_proj.training, 140591004393408) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_v_proj.lora_A, 140533117671584) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_v_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_v_proj.lora_A['default_0'], 140533117670624) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_v_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
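(Every guard subtree in this dump walks the same peft LoRA dispatch: each `# peft/tuners/lora/layer.py:557-568 in forward` comment above points into one short method. The following is a minimal sketch reconstructed only from the source lines quoted in the guards, not the verbatim peft code; hook handling and the DoRA branch are omitted, and `lora_linear_forward` is a hypothetical standalone name for illustration.)

    def lora_linear_forward(layer, x):
        # 'layer' is a peft-wrapped nn.Linear; mirrors peft/tuners/lora/layer.py:557-568
        result = layer.base_layer(x)                  # layer.py:557 -> ID_MATCH on base_layer
        for name in layer.active_adapters:            # tuners_utils.py:516 -> TENSOR_ALIASING on _active_adapter
            if name not in layer.lora_A.keys():       # layer.py:560 -> ID_MATCH on lora_A ModuleDict
                continue
            lora_A = layer.lora_A[name]               # layer.py:562
            lora_B = layer.lora_B[name]               # layer.py:563
            dropout = layer.lora_dropout[name]        # layer.py:564
            scaling = layer.scaling[name]             # layer.py:565 -> EQUALS_MATCH scaling['default_0'] == 1.0
            x = x.to(lora_A.weight.dtype)             # layer.py:566 -> ID_MATCH on lora_A weight
            if not layer.use_dora[name]:              # layer.py:568 -> TYPE_MATCH/DICT_LENGTH/ID_MATCH on use_dora
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Because every module, parameter, and flag read on that path is pinned above by ID_MATCH, TYPE_MATCH, or EQUALS_MATCH, swapping the active adapter, merging it (the LENGTH_CHECK on merged_adapters), or changing the LoRA scale away from 1.0 invalidates these guards and forces a recompile.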
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_v_proj.lora_A['default_0'].weight, 140526696543344) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_v_proj.lora_B, 140533117669424) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_v_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_v_proj.lora_B['default_0'], 140533117669856) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_v_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_v_proj.base_layer, 140581769898128) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_v_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_v_proj.lora_dropout, 140533117670096) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_v_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_v_proj.lora_dropout['default_0'], 140533117670912) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_v_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.add_v_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].attn.add_v_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].attn.add_v_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.add_v_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].attn.add_v_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_v_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.add_v_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[14].attn.add_v_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.add_v_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.add_v_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_add_out, 140533116404784) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- 
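The block above is the complete guard set Dynamo emits for one LoRA-wrapped projection (add_v_proj): identity guards on the module objects, an EQUALS_MATCH on the scalar scaling['default_0'] == 1.0, and ID_MATCH guards on the booleans that select the non-DoRA, unmerged code path. A minimal sketch of the peft forward these guards specialize, paraphrasing the lines cited in the guard comments (peft/tuners/lora/layer.py:557-568) and assuming a single active adapter named 'default_0' with use_dora disabled; this is not the verbatim peft implementation:

    import torch
    import torch.nn as nn

    def lora_linear_forward(layer: nn.Module, x: torch.Tensor) -> torch.Tensor:
        # layer.py:557 -- the frozen base projection runs first
        result = layer.base_layer(x)
        for active_adapter in layer.active_adapters:
            if active_adapter not in layer.lora_A.keys():    # layer.py:560
                continue
            lora_A = layer.lora_A[active_adapter]            # layer.py:562 (module ID_MATCH above)
            lora_B = layer.lora_B[active_adapter]            # layer.py:563
            dropout = layer.lora_dropout[active_adapter]     # layer.py:564
            scaling = layer.scaling[active_adapter]          # layer.py:565 (EQUALS_MATCH == 1.0)
            x = x.to(lora_A.weight.dtype)                    # layer.py:566 (weight ID_MATCH)
            if not layer.use_dora[active_adapter]:           # layer.py:568 -- the branch taken here
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Since peft computes scaling = lora_alpha / r, the EQUALS_MATCH against 1.0 suggests this adapter was configured with lora_alpha == r; changing the effective scale at runtime (for example via set_adapters with different adapter weights) would fail that guard and force a recompile.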
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_add_out, 140533116404784) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].attn.to_add_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_add_out.training, 140591004393408) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_add_out.lora_A, 140533116403920) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_add_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_add_out.lora_A['default_0'], 140533116412800) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_add_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_add_out.lora_A['default_0'].weight, 140526768770240) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_add_out.lora_B, 140533116415248) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_add_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_add_out.lora_B['default_0'], 140533116409728) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_add_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_add_out.base_layer, 140581769898368) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_add_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_add_out.lora_dropout, 140533116413712) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_add_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_add_out.lora_dropout['default_0'], 140533116405360) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_add_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.to_add_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].attn.to_add_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].attn.to_add_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.to_add_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].attn.to_add_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_add_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.to_add_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[14].attn.to_add_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.to_add_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.to_add_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
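Despite its label, the TENSOR_ALIASING guard above records plain object identity for _active_adapter: every peft-wrapped layer in the model references one shared active-adapter object, so Dynamo can cover the whole family with a single identity check per alias instead of re-validating each layer's copy. A hypothetical illustration, where model stands in for the compiled FluxTransformer2DModel:

    # All LoRA-wrapped submodules share one _active_adapter object; the
    # TENSOR_ALIASING guards assert exactly this `is` relationship.
    blocks = model.transformer_blocks
    active_0 = blocks[0].norm1.linear._active_adapter
    active_14 = blocks[14].attn.to_add_out._active_adapter
    assert active_0 is active_14  # one shared object -> one cheap check

The adjacent LENGTH_CHECK (not merged_adapters) and the _disable_adapters ID_MATCH pin the same state that peft's merged and disable_adapters properties read (tuners_utils.py:506 and 511): the compiled graph stays valid only while the adapters remain unmerged and enabled.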
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.norm_added_k, 140581769898512) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.norm_added_k.weight, 140581765999296) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.norm_added_q, 140581769898416) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.norm_added_q.weight, 140581765999376) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
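norm_added_k and norm_added_q are the RMSNorm modules applied to the encoder (text) stream's key and query projections; the guards pin their eps scalars (EQUALS_MATCH == 1e-06) and weight objects. The computation at the cited lines (diffusers/src/diffusers/models/normalization.py:428-430) is ordinary RMS normalization; a condensed sketch, with the real module's dtype bookkeeping simplified:

    import torch

    def rms_norm(hidden_states: torch.Tensor, weight: torch.Tensor | None, eps: float = 1e-6) -> torch.Tensor:
        # upcast for a numerically stable mean of squares
        variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + eps)  # guarded line 428
        if weight is not None:                                       # guarded line 430
            hidden_states = hidden_states.to(weight.dtype) * weight
        return hidden_states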
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].attn.processor, 140581769897648) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
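The EQUALS_MATCH on attn.heads == 24 bakes the head count into the graph, since the cited line (attention_processor.py:1721) derives head_dim from it; the processor object is pinned twice, by TYPE_MATCH for the signature inspection at line 479 and by ID_MATCH for the call at line 490. What the constant feeds is the usual multi-head reshape; the sizes below are illustrative (3072 matches a Flux-style inner dim, which would give head_dim 128):

    import torch

    batch, seq_len, inner_dim, heads = 1, 4608, 3072, 24
    head_dim = inner_dim // heads  # attention_processor.py:1721 -> 128 under these assumptions
    query = torch.randn(batch, seq_len, inner_dim)
    # (b, s, h*d) -> (b, h, s, d), the layout scaled_dot_product_attention expects
    query = query.view(batch, -1, heads, head_dim).transpose(1, 2)
    assert query.shape == (1, heads, seq_len, head_dim)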
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1, accessed_by=DictGetItemGuardAccessor(norm1)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1, 140581769897216) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.norm, 140581769897360) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.silu, 140581769897264) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.linear, 140533117517776) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].norm1.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.linear.lora_A, 140533117513696) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.linear.lora_A['default_0'], 140533117506592) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.linear.lora_A['default_0'].weight, 140526666835392) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.linear.lora_B, 140533117513408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.linear.lora_B['default_0'], 140533117510336) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.linear.base_layer, 140581769897312) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.linear.lora_dropout, 140533117517584) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.linear.lora_dropout['default_0'], 140533117510624) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].norm1.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].norm1.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].norm1.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].norm1.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].norm1.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].norm1.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[14].norm1.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
source=L['self'].transformer_blocks[14].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net, 140581769899040) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[14].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0], 140581769898992) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].training, 140591004393440) # for module 
in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].proj, 140533115778144) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].ff_context.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_A, 140533115768784) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | 
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_A['default_0'], 140533117137248) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_A['default_0'].weight, 140526694208496) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_B, 140533115772000) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_B['default_0'], 140533117127216) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].proj.base_layer, 140581769899088) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_dropout, 140533115772672) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_dropout['default_0'], 140533115772768) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].ff_context.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward 
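The guards in this stretch re-check every attribute and dict lookup made by the PEFT LoRA dispatch whose source lines the guard comments quote (peft/tuners/lora/layer.py:557-568). As a reading aid, here is a minimal sketch of that forward path reassembled from the quoted fragments only; the method signature, the adapter loop header, and the final accumulation are assumptions, not shown in this log:

def forward(self, x, *args, **kwargs):
    result = self.base_layer(x, *args, **kwargs)        # layer.py:557 (quoted in guards)
    for active_adapter in self.active_adapters:         # assumed loop header
        if active_adapter not in self.lora_A.keys():    # layer.py:560
            continue
        lora_A = self.lora_A[active_adapter]            # layer.py:562
        lora_B = self.lora_B[active_adapter]            # layer.py:563
        dropout = self.lora_dropout[active_adapter]     # layer.py:564
        scaling = self.scaling[active_adapter]          # layer.py:565
        x = x.to(lora_A.weight.dtype)                   # layer.py:566
        if not self.use_dora[active_adapter]:           # layer.py:568
            result = result + lora_B(lora_A(dropout(x))) * scaling  # assumed accumulation
    return result

Each of those lookups is why every LoRA-wrapped Linear contributes its own ID_MATCH/TYPE_MATCH/DICT_LENGTH guards to this tree, and since scaling['default_0'] is pinned by EQUALS_MATCH to 1.0, changing the LoRA scale after compilation would be expected to invalidate these guards and trigger a recompile.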
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].ff_context.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].ff_context.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].ff_context.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].ff_context.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].ff_context.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[14].ff_context.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[1], accessed_by=GetItemGuardAccessor(1)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[1], 140581769899184) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2], accessed_by=GetItemGuardAccessor(2)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[2], 140533117137584) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].ff_context.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[2].lora_A, 140533117133648) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[2].lora_A['default_0'], 140533117872032) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[2].lora_A['default_0'].weight, 140526694219616) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[2].lora_B, 140533117140512) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[2].lora_B['default_0'], 140533117868144) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[2].base_layer, 140581769899232) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[2].lora_dropout, 140533117135616) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[2].lora_dropout['default_0'], 140533117133744) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].ff_context.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].ff_context.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].ff_context.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].ff_context.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].ff_context.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].ff_context.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[14].ff_context.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].ff_context.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context, 140581769897408) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.norm, 140581769897600) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.silu, 140581769897504) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.linear, 140533118779504) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[14].norm1_context.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.linear.lora_A, 140533116169440) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.linear.lora_A['default_0'], 140533116162144) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.linear.lora_A['default_0'].weight, 140526666836992) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.linear.lora_B, 140533116161904) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.linear.lora_B['default_0'], 140533116162048) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.linear.base_layer, 140581769897552) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.linear.lora_dropout, 140533116162000) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.linear.lora_dropout['default_0'], 140533116170688) #
dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].norm1_context.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].norm1_context.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[14].norm1_context.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].norm1_context.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[14].norm1_context.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] 
[0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[14].norm1_context.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[14].norm1_context.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm1_context.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[14].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].transformer_blocks[14].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm2_context, 140581769898608) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[14].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[14]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] 
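[editor's note] The subtree that closes here is the complete guard set Dynamo installs for one PEFT `lora.Linear` (`transformer_blocks[14].norm1_context.linear`): ID_MATCH guards (`___check_obj_id`, an object-identity check) pin the wrapper, its `base_layer`, `lora_A['default_0']`, `lora_B['default_0']`, `lora_dropout['default_0']`, and each of their `training` flags; TYPE_MATCH plus DICT_LENGTH pin `scaling` and `use_dora` as one-entry dicts; EQUALS_MATCH bakes in `scaling['default_0'] == 1.0`; LENGTH_CHECK asserts `merged_adapters` is empty; DICT_CONTAINS verifies no instance-level `forward` has been monkey-patched onto the module; and TENSOR_ALIASING records that this layer's `_active_adapter` is the very same object as `transformer_blocks[0].norm1.linear._active_adapter`. The same pattern repeats for every LoRA layer in the model. A dump of this shape can be reproduced on a much smaller model; the sketch below is an assumed toy setup (`Tiny` and `proj` are invented names, requires torch >= 2.1 and peft), not the original Flux run:

```python
# Minimal sketch (assumed toy setup, not the original Flux run): reproduce a
# TREE_GUARD_MANAGER dump like the one above on a single PEFT LoRA Linear.
import torch
from torch import nn
from peft import LoraConfig, get_peft_model

class Tiny(nn.Module):
    def __init__(self):
        super().__init__()
        self.proj = nn.Linear(16, 16)

    def forward(self, x):
        return self.proj(x)

model = get_peft_model(Tiny(), LoraConfig(target_modules=["proj"], r=4))
torch._logging.set_logs(guards=True)      # same channel as TORCH_LOGS="guards"
compiled = torch.compile(model)
compiled(torch.randn(2, 16))              # prints the guard tree on first call
```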
| | | | | +- GuardManager: source=L['self'].transformer_blocks[15], accessed_by=GetItemGuardAccessor(15) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15], 140581769896880) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff, accessed_by=DictGetItemGuardAccessor(ff) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff, 140581769900528) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net, 140581769900768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[15].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0], 140581769900720) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[15].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].proj, 140533116283504) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].ff.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].proj.lora_A, 140533116273184) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].proj.lora_A['default_0'], 140533116276112) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].proj.lora_A['default_0'].weight, 140526657729968) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].proj.lora_B, 140533116284128) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].proj.lora_B['default_0'], 140533116288688) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].proj.base_layer, 140581769900816) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].proj.lora_dropout, 140533116279856) 
# dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].proj.lora_dropout['default_0'], 140533116288016) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].ff.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].ff.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.scaling['default_0'], 
accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].ff.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].ff.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].ff.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].ff.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[15].ff.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | 
+- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
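[editor's note] Note the EQUALS_MATCH guards: `scaling['default_0'] == 1.0` above and `approximate == 'tanh'` here mean Dynamo specialized the traced graph to those exact Python values, so mutating either one at runtime fails the guard and forces a retrace (each retrace shows up under an incremented compile id; the `[0/3]` prefix on these lines appears to mark the fourth compilation of this frame). A minimal way to observe that, using an invented toy module and a counting backend rather than the real inductor backend:

```python
# Sketch (toy module, invented names): a value pinned by EQUALS_MATCH is baked
# into the graph, so mutating it forces a recompile. The counting backend just
# tallies compilations instead of optimizing anything.
import torch
from torch import nn

compiles = 0

def counting_backend(gm, example_inputs):
    global compiles
    compiles += 1                 # invoked once per (re)compilation
    return gm.forward             # run the captured graph as-is

class Scaled(nn.Module):
    def __init__(self):
        super().__init__()
        self.scaling = {"default_0": 1.0}   # mirrors the guarded PEFT dict
        self.lin = nn.Linear(8, 8)

    def forward(self, x):
        return self.lin(x) * self.scaling["default_0"]

m = Scaled()
c = torch.compile(m, backend=counting_backend)
x = torch.randn(1, 8)
c(x)                              # compiles; guards scaling['default_0'] == 1.0
m.scaling["default_0"] = 0.5      # fails the EQUALS_MATCH-style guard
c(x)
print(compiles)                   # 2
```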
[__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[1], 140581769900864) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[2], 140533116279808) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].ff.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[2].lora_A, 140533116280624) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[15].ff.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[2].lora_A['default_0'], 140533116197312) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[2].lora_A['default_0'].weight, 140526657721168) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[2].lora_B, 140533116283600) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | 
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[2].lora_B['default_0'], 140533116200864) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[2].base_layer, 140581769900912) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] 
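[editor's note] Every `.training` flag in this tree is guarded with ID_MATCH against one of two object ids (140591004393440 and 140591004393408), consistent with the `True`/`False` singletons: train/eval mode is part of the guarded state, and calling `.train()` or `.eval()` on any guarded submodule invalidates the cache entry. A toy demonstration (assumed setup, reusing the counting-backend idea from the previous sketch):

```python
# Sketch (assumed setup): `training` is guarded by object identity, so a bare
# .eval() call is enough to fail an ID_MATCH guard and retrace.
import torch
from torch import nn

retraces = []

def backend(gm, example_inputs):
    retraces.append(1)            # one entry per compilation
    return gm.forward

m = nn.Dropout(p=0.1)             # forward reads self.training
c = torch.compile(m, backend=backend)
x = torch.randn(4)
c(x)                              # compiles with training == True guarded
m.eval()                          # flips the boolean singleton
c(x)                              # guard fails -> recompile
print(len(retraces))              # 2
```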
[0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[2].lora_dropout, 140533116283264) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[2].lora_dropout['default_0'], 140533116277120) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].ff.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].ff.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # 
peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].ff.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].ff.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].ff.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].ff.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[15].ff.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 
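For orientation before the dump continues: every "# peft/tuners/lora/layer.py:..." comment in the records above points into the LoRA Linear.forward that Dynamo traced. A minimal sketch of that forward, reconstructed only from the source lines quoted in these guard comments (an approximation, not the verbatim PEFT code):

    # Sketch of peft/tuners/lora/layer.py:557-568, per the quoted guard comments.
    def forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)      # :557 -> ID_MATCH on base_layer
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():  # :560 -> ID_MATCH on the lora_A ModuleDict
                continue
            lora_A = self.lora_A[active_adapter]          # :562
            lora_B = self.lora_B[active_adapter]          # :563
            dropout = self.lora_dropout[active_adapter]   # :564
            scaling = self.scaling[active_adapter]        # :565 -> EQUALS_MATCH scaling == 1.0
            x = x.to(lora_A.weight.dtype)                 # :566 -> ID_MATCH on the weight object
            if not self.use_dora[active_adapter]:         # :568 -> ID_MATCH on the False singleton
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Each attribute access in that function becomes one GuardManager node and each value the trace depended on becomes a leaf check, which is why a single LoRA-wrapped Linear contributes a couple of dozen guards.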
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn, 140581769899760) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_k, 140533116769456) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_k.lora_A, 140533116019344) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
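The leaf checks in this tree come in a small set of kinds. Rough pure-Python equivalents, for reading purposes only (the real accessors are compiled helpers behind torch/_dynamo/guards.py):

    # Illustrative-only semantics of the guard kinds appearing in this dump.
    def ID_MATCH(obj, expected_id):        # same object identity; cheapest check
        return id(obj) == expected_id

    def TYPE_MATCH(obj, type_id):          # same concrete type (e.g. dict)
        return id(type(obj)) == type_id

    def EQUALS_MATCH(value, expected):     # value equality, e.g. scaling == 1.0
        return value == expected

    def DICT_LENGTH(d, n):                 # dict has exactly n entries
        return len(d) == n

    def LENGTH_CHECK_empty(container):     # "not container": must be empty
        return not container

    def DICT_CONTAINS(d, key, present):    # key present/absent in a __dict__
        return (key in d) is present

    def TENSOR_ALIASING(a, b):             # both sources must be one object
        return a is b

Note how every TENSOR_ALIASING record ties a module's _active_adapter back to the one on transformer_blocks[0].norm1.linear: all the PEFT layers share a single active-adapter object, and the guard pins that sharing.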
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_k.lora_A['default_0'], 140533116015888) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_k.lora_A['default_0'].weight, 140526777872880) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_k.lora_B, 140533116020304) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_k.lora_B['default_0'], 140533116015696) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_k.base_layer, 140581769899904) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_k.lora_dropout, 140533116022080) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_k.lora_dropout['default_0'], 140533116018912) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[15].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
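To regenerate a dump like this, or to see which specific guard failed before a recompile, the relevant logging can be switched on directly. A small sketch, assuming a recent PyTorch 2.x:

    import torch

    # Same artifacts as TORCH_LOGS="guards,recompiles" in the environment.
    torch._logging.set_logs(guards=True, recompiles=True)

    @torch.compile
    def f(x):
        return x * 2

    f(torch.randn(8))  # prints the TREE_GUARD_MANAGER for this frame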
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_q, 140533117416400) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_q.lora_A, 140533117414432) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_q.lora_A['default_0'], 140533117408096) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_q.lora_A['default_0'].weight, 140526694219456) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_q.lora_B, 140533117408192) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_q.lora_B['default_0'], 140533117408144) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_q.base_layer, 140581769900000) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
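One detail worth reading off the bracketed tags in this log: [0/3] means frame 0 is on its fourth compiled version, i.e. three earlier guard sets for this forward have already been invalidated. Dynamo bounds how often that can happen; a sketch of the relevant knobs (attribute names from torch._dynamo.config; default values vary across releases):

    import torch._dynamo as dynamo

    dynamo.config.cache_size_limit = 8  # max compiled versions kept per frame
    dynamo.reset()                      # drop all cached graphs and their guard trees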
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_q.lora_dropout, 140533117414720) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_q.lora_dropout['default_0'], 140533117408000) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[15].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_v, 140533116354672) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
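The merged_adapters and use_dora checks guard the unmerged, single-adapter code path. If the adapter does not need to stay hot-swappable, folding it into the base weights before compiling removes that per-call Python branching, and with it most of this guard surface. A hedged sketch at the diffusers level (method names as in recent diffusers releases; the checkpoint path is a placeholder):

    # Hypothetical mitigation: fuse the LoRA before torch.compile.
    pipe.load_lora_weights("path/to/lora")  # placeholder checkpoint
    pipe.fuse_lora()                        # folds scaling * (B @ A) into each base Linear
    pipe.unload_lora_weights()              # optionally drop the PEFT wrappers entirely
    pipe.transformer = torch.compile(pipe.transformer)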
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_v.lora_A, 140533116365424) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_v.lora_A['default_0'], 140533116359376) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_v.lora_A['default_0'].weight, 140526777868240) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_v.lora_B, 140533116364944) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_v.lora_B['default_0'], 140533116361872) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_v.base_layer, 140581769900096) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_v.lora_dropout, 140533116355824) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_v.lora_dropout['default_0'], 140533116367248) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
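About the recurring EQUALS_MATCH on scaling['default_0'] == 1.0: in PEFT the per-adapter scaling is lora_alpha / r (in the common non-rslora configuration), so this guard effectively pins the adapter's hyper-parameters. A worked example with assumed values:

    r, lora_alpha = 16, 16    # assumed adapter config; any r == lora_alpha gives 1.0
    scaling = lora_alpha / r  # 1.0, the value pinned by EQUALS_MATCH
    # Rescaling adapters at run time (e.g. set_adapters(..., adapter_weights=[0.5])
    # or a pipeline-level lora_scale != 1.0) rewrites this dict entry, fails the
    # guard, and forces yet another recompile of the frame.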
+- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[15].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.norm_k, 140581769899952) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] 
[0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.norm_k.weight, 140581765900192) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[15].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.norm_q, 140581769899856) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.norm_q.weight, 140581773243424) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[15].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out, 140581769900288) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[0], 140533115735968) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].attn.to_out[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[0].training, 140591004393408) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | 
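The norm_q and norm_k subtrees above reduce to the diffusers RMSNorm forward at normalization.py:428-430: eps is guarded by EQUALS_MATCH == 1e-06 (a Python float captured as a constant) and weight by ID_MATCH on the parameter object itself. A short runnable sketch, with the two cited lines taken from the guard comments and the variance line filled in with the standard RMSNorm form (an assumption, not copied from diffusers):

    import torch
    import torch.nn as nn

    class ToyRMSNorm(nn.Module):
        """Hypothetical stand-in for the RMSNorm behind attn.norm_q / attn.norm_k."""
        def __init__(self, dim: int, eps: float = 1e-6) -> None:
            super().__init__()
            self.eps = eps                                # EQUALS_MATCH: eps == 1e-06
            self.weight = nn.Parameter(torch.ones(dim))   # ID_MATCH on the Parameter

        def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
            variance = hidden_states.pow(2).mean(-1, keepdim=True)            # assumed standard form
            hidden_states = hidden_states * torch.rsqrt(variance + self.eps)  # normalization.py:428
            if self.weight is not None:                                       # normalization.py:430
                hidden_states = hidden_states * self.weight
            return hidden_states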
| +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[0].lora_A, 140533115746096) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[0].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[0].lora_A['default_0'], 140533116270192) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[0].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[0].lora_A['default_0'].weight, 140526659963296) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[0].lora_B, 140533115732944) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[0].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[0].lora_B['default_0'], 140533116266688) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[0].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[0].base_layer, 140581769900336) # result = self.base_layer(x, *args, **kwargs) # 
peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[0].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[0].lora_dropout, 140533115742352) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[0].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[0].lora_dropout['default_0'], 140533115735200) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[0].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.to_out[0].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].attn.to_out[0].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].attn.to_out[0].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.to_out[0].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].attn.to_out[0].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[0].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.to_out[0].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[15].attn.to_out[0].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[0]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[0]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[1], accessed_by=GetItemGuardAccessor(1) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[1], 140581769900384) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
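Every guard kind appearing in this subtree (ID_MATCH, TYPE_MATCH, DICT_LENGTH, EQUALS_MATCH, LENGTH_CHECK, DICT_CONTAINS, TENSOR_ALIASING) boils down to a cheap identity, length, or equality check that Dynamo evaluates before each reuse of the compiled graph. Roughly, in plain Python (the real checks live in Dynamo's guard managers, so this is an approximation of their semantics, not the actual implementation):

    def id_match(obj, expected_id):               # ___check_obj_id
        return id(obj) == expected_id             # exact object: a specific nn.Module, or True/False

    def type_match(obj, expected_type_id):        # ___check_type_id
        return id(type(obj)) == expected_type_id  # exact type, e.g. dict for .scaling

    def dict_length(d, n):                        # DICT_LENGTH
        return len(d) == n

    def equals_match(value, expected):            # EQUALS_MATCH, e.g. scaling['default_0'] == 1.0
        return value == expected

    def length_check_falsy(seq):                  # LENGTH_CHECK: not merged_adapters
        return not seq

    def dict_contains(d, key, expected):          # DICT_CONTAINS: not ___dict_contains('forward', ...)
        return (key in d) == expected

    def aliasing(a, b):                           # TENSOR_ALIASING (used here on non-tensors too)
        return a is b

Note that the TENSOR_ALIASING pair on _active_adapter asserts that every LoRA layer still shares the one object owned by transformer_blocks[0].norm1.linear; the dump prints that same check twice per layer.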
source=L['self'].transformer_blocks[15].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_k_proj, 140533116367536) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].attn.add_k_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_k_proj.training, 140591004393408) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_k_proj.lora_A, 140533116361488) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_k_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_k_proj.lora_A['default_0'], 140533116356544) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_k_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_k_proj.lora_A['default_0'].weight, 140526777866000) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_k_proj.lora_B, 140533116355344) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[15].attn.add_k_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_k_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_k_proj.lora_B['default_0'], 140533116356064) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_k_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_k_proj.base_layer, 140581769900144) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_k_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_k_proj.lora_dropout, 140533116355440) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_k_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_k_proj.lora_dropout['default_0'], 140533116355200) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_k_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.add_k_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: 
len(L['self'].transformer_blocks[15].attn.add_k_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].attn.add_k_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.add_k_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].attn.add_k_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_k_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.add_k_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[15].attn.add_k_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | 
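add_k_proj and add_q_proj (like the to_out pair above) are guarded because the joint-attention processor calls them directly on encoder_hidden_states. The cited lines in attention_processor.py sketch out as below; this fragment is assembled from the guard comments with the attention math elided, and is not verbatim diffusers code:

    def joint_attention_tail(attn, query, key, encoder_hidden_states, hidden_states):
        """Hypothetical fragment tracing the guarded call sites."""
        if attn.norm_q is not None:                                                # attention_processor.py:1727
            query = attn.norm_q(query)
        if attn.norm_k is not None:                                                # attention_processor.py:1729
            key = attn.norm_k(key)
        encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states)  # :1735
        encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states)    # :1736
        # ... scaled dot-product attention over the concatenated streams elided ...
        hidden_states = attn.to_out[0](hidden_states)                              # :1776, LoRA-wrapped Linear
        hidden_states = attn.to_out[1](hidden_states)                              # :1778, Dropout
        return hidden_states

Because each of these projections is itself a LoRA-wrapped Linear, each contributes the same bundle of roughly two dozen guards shown above for to_v, which is why the tree repeats almost verbatim per projection and per block.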
| | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_k_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_k_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_q_proj, 140533116354624) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].attn.add_q_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_q_proj.training, 
140591004393408) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_q_proj.lora_A, 140533115735920) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_q_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_q_proj.lora_A['default_0'], 140533115734528) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_q_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_q_proj.lora_A['default_0'].weight, 140526659974336) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_q_proj.lora_B, 140533115734624) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_q_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_q_proj.lora_B['default_0'], 140533115736736) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_q_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_q_proj.base_layer, 140581769900240) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_q_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_q_proj.lora_dropout, 140533116358656) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_q_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_q_proj.lora_dropout['default_0'], 140533116356976) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_q_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.add_q_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].attn.add_q_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].attn.add_q_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.add_q_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].attn.add_q_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_q_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.add_q_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[15].attn.add_q_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_q_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_q_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_v_proj, 140533116357408) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].attn.add_v_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_v_proj.training, 140591004393408) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_v_proj.lora_A, 140533116357888) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_v_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_v_proj.lora_A['default_0'], 140533116356016) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_v_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_v_proj.lora_A['default_0'].weight, 140526659971216) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_v_proj.lora_B, 140533116355872) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_v_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_v_proj.lora_B['default_0'], 140533116370896) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_v_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_v_proj.base_layer, 140581769900192) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_v_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_v_proj.lora_dropout, 140533116355104) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_v_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_v_proj.lora_dropout['default_0'], 140533116359616) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_v_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.add_v_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].attn.add_v_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].attn.add_v_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.add_v_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].attn.add_v_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_v_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.add_v_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[15].attn.add_v_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.add_v_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.add_v_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_add_out, 140533116271008) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].attn.to_add_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_add_out.training, 140591004393408) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_add_out.lora_A, 140533116261552) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_add_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_add_out.lora_A['default_0'], 140533116283840) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_add_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_add_out.lora_A['default_0'].weight, 140526659975616) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_add_out.lora_B, 140533116267792) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_add_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_add_out.lora_B['default_0'], 140533116273088) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_add_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_add_out.base_layer, 140581769900432) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_add_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_add_out.lora_dropout, 140533116266304) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_add_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_add_out.lora_dropout['default_0'], 140533116258912) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_add_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.to_add_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].attn.to_add_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].attn.to_add_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.to_add_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].attn.to_add_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_add_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.to_add_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[15].attn.to_add_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.to_add_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.to_add_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.norm_added_k, 140581769900576) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.norm_added_k.weight, 140581772718976) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.norm_added_q, 140581769900480) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.norm_added_q.weight, 140581772751824) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].attn.processor, 140581769899712) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1, accessed_by=DictGetItemGuardAccessor(norm1)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1, 140581769899280) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.norm, 140581769899424) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.silu, 140581769899328) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.linear, 140533117874432) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].norm1.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.linear.lora_A, 140533117876640) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | |
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.linear.lora_A['default_0'], 140533115922960) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.linear.lora_A['default_0'].weight, 140526694212416) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.linear.lora_B, 140533117870016) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.linear.lora_B['default_0'], 140533115921904) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.linear.base_layer, 140581769899376) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.linear.lora_dropout, 140533117868192) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.lora_dropout.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.linear.lora_dropout['default_0'], 140533117867712) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].norm1.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].norm1.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].norm1.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: 
___check_type_id(L['self'].transformer_blocks[15].norm1.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].norm1.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].norm1.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[15].norm1.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[15].norm1.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm2, accessed_by=DictGetItemGuardAccessor(norm2) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm2, 140581769900624) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm2.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context, 140581769900960) # context_ff_output = 
self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net, 140581769901104) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[15].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0], 
accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0], 140581769901056) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].proj, 140533116198752) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].ff_context.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_A, 140533116197888) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_A['default_0'], 140533116191072) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | 
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_A['default_0'].weight, 140526554196192) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_B, 140533116198416) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_B['default_0'], 140533116199568) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | 
| | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].proj.base_layer, 140581769901152) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_dropout, 140533116196688) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_dropout['default_0'], 140533116197552) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | 
| | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].ff_context.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].ff_context.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].ff_context.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].ff_context.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].ff_context.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].ff_context.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[15].ff_context.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: 
L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[1], 140581769901248) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[2], 
140533116195344) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].ff_context.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[2].lora_A, 140533116195920) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[2].lora_A['default_0'], 140533116193568) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | 
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[2].lora_A['default_0'].weight, 140526554190512) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[2].lora_B, 140533116197408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[2].lora_B['default_0'], 140533116195248) # lora_B = self.lora_B[active_adapter] # 
peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[2].base_layer, 140581769901296) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[2].lora_dropout, 140533116197216) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[2].lora_dropout['default_0'], 140533116194480) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].ff_context.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].ff_context.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[15].ff_context.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].ff_context.net[2].use_dora, 140591004466944) # if 
not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].ff_context.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].ff_context.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[15].ff_context.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].ff_context.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[15].ff_context.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context, 140581769899472) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # 
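The run of guards above for transformer_blocks[15].ff_context.net[2] is TorchDynamo tracing PEFT's LoRA Linear forward: every dict it reads (lora_A, lora_B, lora_dropout, scaling, use_dora, all keyed by 'default_0') gets pinned, as do the merged / disable_adapters / active_adapter properties quoted from peft/tuners/tuners_utils.py:506/511/516. A sketch of that control flow, reconstructed from the source lines quoted in the guard comments; the loop header and the final accumulation line are assumptions filled in around the quoted statements, and it expects a PEFT lora.Linear-like object as self:

    def lora_linear_forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)       # layer.py:557
        for active_adapter in self.active_adapters:        # assumed loop header
            if active_adapter not in self.lora_A.keys():   # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]           # layer.py:562
            lora_B = self.lora_B[active_adapter]           # layer.py:563
            dropout = self.lora_dropout[active_adapter]    # layer.py:564
            scaling = self.scaling[active_adapter]         # layer.py:565
            x = x.to(lora_A.weight.dtype)                  # layer.py:566
            if not self.use_dora[active_adapter]:          # layer.py:568
                # assumed accumulation; the guard on use_dora['default_0']
                # shows this (non-DoRA) branch is the one traced here
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Because every LoRA-wrapped linear in every block runs this same method, the guard subtree repeats near-verbatim per module, and the shared _active_adapter list shows up as the TENSOR_ALIASING checks against transformer_blocks[0].norm1.linear.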
diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.norm, 140581769899664) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.silu, 140581769899568) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # 
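norm1_context is an adaptive LayerNorm: the guards pin emb itself (the ID_MATCH at normalization.py:135, presumably against None, so the embedding branch is skipped in this trace) plus the silu/linear modules that compute the modulation. A sketch matching the quoted normalization.py lines; the 6-way chunk and the return signature are inferred from the 5-value unpacking at transformer_flux.py:167 quoted above, not taken from the log, and the function expects a diffusers AdaLayerNormZero-like module as self:

    def ada_layer_norm_zero_forward(self, x, emb):
        # self.emb is pinned by ID_MATCH (normalization.py:135), so the
        # conditional embedding step is not exercised in this trace
        emb = self.linear(self.silu(emb))                                  # :137
        shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = (
            emb.chunk(6, dim=1)                                            # assumed split
        )
        x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]   # :139
        return x, gate_msa, shift_mlp, scale_mlp, gate_mlp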
diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.linear, 140533115921472) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[15].norm1_context.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.linear.lora_A, 140533117407136) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[15].norm1_context.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.linear.lora_A['default_0'], 140533117418944) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.linear.lora_A['default_0'].weight, 140526694213536) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.linear.lora_B, 140533117418272) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # 
peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.linear.lora_B['default_0'], 140533117403488) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.linear.base_layer, 140581769899616) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.linear.lora_dropout, 140533117417456) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.linear.lora_dropout['default_0'], 140533115916240) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].norm1_context.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].norm1_context.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: 
L['self'].transformer_blocks[15].norm1_context.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].norm1_context.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[15].norm1_context.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[15].norm1_context.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[15].norm1_context.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm1_context.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[15].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm2_context, 140581769900672) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[15].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[15]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=L['self'].transformer_blocks[16], accessed_by=GetItemGuardAccessor(16) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16], 140581769898944) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff, accessed_by=DictGetItemGuardAccessor(ff) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff, 140581769902592) # ff_output = self.ff(norm_hidden_states) # 
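The ID_MATCH on transformer_blocks[16] against the loop at transformer_flux.py:471 marks where the per-block pattern starts over: every block visited by `for index_block, block in enumerate(self.transformer_blocks):` contributes its own near-identical guard subtree, which is why this dump runs to thousands of entries. Enabling the [__guards] artifact logger is enough to reproduce a dump like this; a minimal sketch, assuming a recent PyTorch 2.x (exact output format varies by version), with a stand-in module in place of the Flux transformer:

    import torch

    # equivalent to running with TORCH_LOGS="guards"
    torch._logging.set_logs(guards=True)

    model = torch.nn.Linear(8, 8)        # stand-in for the real model
    compiled = torch.compile(model)
    compiled(torch.randn(2, 8))          # guard tree prints after compilation

One common way to shrink the guard surface seen here is to merge the adapter weights before compiling (PEFT's merge_and_unload() or diffusers' fuse_lora()), which removes the per-module LoRA bookkeeping these guards have to pin.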
diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net, 140581769902832) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[16].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[16].ff.net[0], 140581769902784) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].proj, 140533116470560) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].ff.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] 
[0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].proj.lora_A, 140533116078400) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].proj.lora_A['default_0'], 140533116076672) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].proj.lora_A['default_0'].weight, 140531259421488) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].proj.lora_B, 140533116084832) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].proj.lora_B['default_0'], 140533116090544) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].proj.base_layer, 140581769902880) # result = self.base_layer(x, *args, 
**kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].proj.lora_dropout, 140533116472384) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].proj.lora_dropout['default_0'], 140533116469888) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].ff.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].ff.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].ff.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].ff.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].ff.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[16].ff.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].ff.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[16].ff.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].ff.net[0].approximate == 'tanh' # return F.gelu(gate, 
approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[1], 140581769902928) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[2], 140533116085696) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].ff.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | 
| | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[2].lora_A, 140533116089392) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[2].lora_A['default_0'], 140533116086368) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[2].lora_A['default_0'].weight, 140531259423088) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[2].lora_B, 140533116084544) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[2].lora_B['default_0'], 140533116083056) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | 
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[2].base_layer, 140581769902976) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[2].lora_dropout, 140533116090352) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[2].lora_dropout['default_0'], 140533116089920) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].ff.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].ff.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].ff.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].ff.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].ff.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].ff.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[16].ff.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff._backward_hooks, 
accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn, 140581769901824) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_k, 140533115973360) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] 
[0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_k.lora_A, 140533115970240) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_k.lora_A['default_0'], 140533115973936) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | 
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_k.lora_A['default_0'].weight, 140526663687904) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_k.lora_B, 140533115972544) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_k.lora_B['default_0'], 140533115972112) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | 
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_k.base_layer, 140581769901968) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_k.lora_dropout, 140533115971872) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_k.lora_dropout['default_0'], 140533115974416) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] 
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[16].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_q, 140533115011184) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_q.lora_A, 140533115015360) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_q.lora_A['default_0'], 140533115017568) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] 
[0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_q.lora_A['default_0'].weight, 140526787429456) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_q.lora_B, 140533115024480) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_q.lora_B['default_0'], 140533115012192) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_q.base_layer, 140581769902064) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_q.lora_dropout, 140533115015456) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_q.lora_dropout['default_0'], 140533115024048) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.use_dora['default_0'], 
accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[16].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].transformer_blocks[16].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_v, 140533117405216) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_v.lora_A, 140533116609264) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[16].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_v.lora_A['default_0'], 140533116608976) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_v.lora_A['default_0'].weight, 140526663694384) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_v.lora_B, 140533116612048) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_v.lora_B['default_0'], 140533116484960) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_v.base_layer, 140581769902160) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_v.lora_dropout, 140533116602736) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_v.lora_dropout['default_0'], 140533116608880) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] 
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[16].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[16].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.norm_k, 140581769902016) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.norm_k.weight, 140581772749424) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.norm_q, 140581769901920) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 
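
By contrast, norm_k and norm_q need only a handful of guards because they are plain RMSNorm modules: an ID_MATCH on the module and its weight, plus an EQUALS_MATCH on eps == 1e-06, matching the two lines quoted from diffusers/src/diffusers/models/normalization.py (428 and 430). A rough sketch of that forward, assuming the usual float32 variance computation and eliding diffusers' exact dtype casting:

    import torch
    import torch.nn as nn

    class RMSNormSketch(nn.Module):
        def __init__(self, dim, eps=1e-6, elementwise_affine=True):
            super().__init__()
            self.eps = eps  # guarded via EQUALS_MATCH: eps == 1e-06
            self.weight = nn.Parameter(torch.ones(dim)) if elementwise_affine else None

        def forward(self, hidden_states):
            variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)
            hidden_states = hidden_states * torch.rsqrt(variance + self.eps)  # :428
            if self.weight is not None:  # :430, hence the ID_MATCH on .weight
                hidden_states = hidden_states * self.weight
            return hidden_states

    out = RMSNormSketch(128)(torch.randn(2, 10, 128))
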
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.norm_q.weight, 140581772748704) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out, 140581769902352) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[0], 140533116474016) # hidden_states = 
attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].attn.to_out[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[0].training, 140591004393408) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[0].lora_A, 140533116473344) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[0].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[0].lora_A['default_0'], 140533116471184) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | 
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[0].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[0].lora_A['default_0'].weight, 140537325062880) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[0].lora_B, 140533116482272) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[0].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[0].lora_B['default_0'], 140533116471808) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward 
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[0].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[0].base_layer, 140581769902400) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[0].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[0].lora_dropout, 140533116483952) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[0].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[0].lora_dropout['default_0'], 140533116473440) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[0].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.to_out[0].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].attn.to_out[0].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].attn.to_out[0].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.to_out[0].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # 
peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].attn.to_out[0].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[0].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.to_out[0].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[16].attn.to_out[0].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[0]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[0]._active_adapter, 
accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[1], accessed_by=GetItemGuardAccessor(1) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[1], 140581769902448) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_k_proj, 140533116485392) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].attn.add_k_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 
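
The paired TENSOR_ALIASING entries assert object identity, not value equality: in this trace every adapter layer's active_adapter property returned the same _active_adapter object, so Dynamo pins each module's reference against the first one it saw, transformer_blocks[0].norm1.linear._active_adapter. Rebinding that attribute on any single layer, even to an equal copy, would fail the guard. A toy illustration of the distinction (plain dicts standing in for the PEFT layers):

    shared = ["default_0"]
    layer_a = {"_active_adapter": shared}
    layer_b = {"_active_adapter": shared}
    assert layer_a["_active_adapter"] is layer_b["_active_adapter"]  # guard holds

    layer_b["_active_adapter"] = list(shared)  # equal value, different object
    assert layer_b["_active_adapter"] == layer_a["_active_adapter"]
    assert layer_b["_active_adapter"] is not layer_a["_active_adapter"]  # guard would fail
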
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_k_proj.training, 140591004393408) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_k_proj.lora_A, 140533116483472) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_k_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_k_proj.lora_A['default_0'], 140533116482368) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_k_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[16].attn.add_k_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_k_proj.lora_A['default_0'].weight, 140526663689664) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_k_proj.lora_B, 140533116485440) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_k_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_k_proj.lora_B['default_0'], 140533116482896) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_k_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_k_proj.base_layer, 140581769902208) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_k_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_k_proj.lora_dropout, 140533116485152) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_k_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_k_proj.lora_dropout['default_0'], 140533116484336) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[16].attn.add_k_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_k_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.add_k_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].attn.add_k_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].attn.add_k_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.add_k_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].attn.add_k_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_k_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.add_k_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[16].attn.add_k_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_k_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_k_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
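All of the add_k_proj guards above are checks on attributes read inside the PEFT LoRA forward. A minimal sketch of that path, reconstructed from the source comments quoted in the guard lines (peft/tuners/lora/layer.py:557-568); the real Linear.forward also handles merged weights and DoRA, so treat this as illustrative rather than the verbatim implementation:

    # Sketch of peft.tuners.lora.layer.Linear.forward, pieced together from the
    # line references in the guard dump above. Illustrative only.
    def lora_linear_forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)        # layer.py:557
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():    # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]            # layer.py:562
            lora_B = self.lora_B[active_adapter]            # layer.py:563
            dropout = self.lora_dropout[active_adapter]     # layer.py:564
            scaling = self.scaling[active_adapter]          # layer.py:565
            x = x.to(lora_A.weight.dtype)                   # layer.py:566
            if not self.use_dora[active_adapter]:           # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Every attribute this path touches (lora_A, lora_B, lora_dropout, scaling, use_dora, plus the merged_adapters, _disable_adapters, and _active_adapter properties) gets its own guard, which is why each LoRA-wrapped projection contributes a subtree of roughly twenty guards, repeated for every projection in every block.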
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_q_proj, 140533116474304) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].attn.add_q_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_q_proj.training, 140591004393408) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_q_proj.lora_A, 140533116473536) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_q_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | 
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_q_proj.lora_A['default_0'], 140533116484000) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_q_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_q_proj.lora_A['default_0'].weight, 140537325068880) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_q_proj.lora_B, 140533116469984) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_q_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_q_proj.lora_B['default_0'], 140533116473296) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_q_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_q_proj.base_layer, 140581769902304) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_q_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_q_proj.lora_dropout, 140533116482944) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[16].attn.add_q_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_q_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_q_proj.lora_dropout['default_0'], 140533116474256) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_q_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.add_q_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].attn.add_q_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].attn.add_q_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.add_q_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].attn.add_q_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_q_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.add_q_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[16].attn.add_q_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_q_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj._backward_pre_hooks, 
accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_q_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
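Re-expressed as plain Python, the per-projection checks above amount to roughly the following (illustrative only; mod stands for one LoRA-wrapped projection such as L['self'].transformer_blocks[16].attn.add_q_proj, and compile_time_id is a stand-in for the object id recorded when the frame was compiled):

    # Rough Python equivalent of one projection's guard subtree. ID_MATCH is an
    # identity check against a recorded id(), TYPE_MATCH checks the exact type,
    # EQUALS_MATCH compares by value, LENGTH_CHECK tests emptiness.
    def lora_projection_guards_hold(mod, compile_time_id):
        return (
            id(mod) == compile_time_id                  # ID_MATCH on the module
            and type(mod.scaling) is dict               # TYPE_MATCH
            and len(mod.scaling) == 1                   # DICT_LENGTH
            and mod.scaling["default_0"] == 1.0         # EQUALS_MATCH
            and type(mod.use_dora) is dict              # TYPE_MATCH
            and len(mod.use_dora) == 1                  # DICT_LENGTH
            and mod.use_dora["default_0"] is False      # ID_MATCH against id(False)
            and not mod.merged_adapters                 # LENGTH_CHECK
            and mod._disable_adapters is False          # ID_MATCH against id(False)
        )

The EQUALS_MATCH means the LoRA scale of 1.0 is baked into this compiled graph: running with a different scale, or fusing/merging the adapter (which would populate merged_adapters), fails the guards and triggers another recompile, consistent with the [0/3] tag marking this as the fourth guard set built for frame 0.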
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_v_proj, 140533116484480) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].attn.add_v_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_v_proj.training, 140591004393408) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_v_proj.lora_A, 140533116481216) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_v_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_v_proj.lora_A['default_0'], 140533116474112) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_v_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_v_proj.lora_A['default_0'].weight, 140531316021248) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[16].attn.add_v_proj.lora_B, 140533116485104) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_v_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_v_proj.lora_B['default_0'], 140533116482032) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_v_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_v_proj.base_layer, 140581769902256) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_v_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_v_proj.lora_dropout, 140533116473968) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_v_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_v_proj.lora_dropout['default_0'], 140533116480016) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_v_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: 
___check_type_id(L['self'].transformer_blocks[16].attn.add_v_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].attn.add_v_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].attn.add_v_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.add_v_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].attn.add_v_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_v_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.add_v_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[16].attn.add_v_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.add_v_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.add_v_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
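One thing the repetition makes easy to miss: the TENSOR_ALIASING guards do not compare values, they assert that every projection's _active_adapter is the very same object as transformer_blocks[0].norm1.linear._active_adapter, so the active-adapter list is guarded once by identity and shared across the whole model. When a dump like this keeps growing (the [0/3] compile here means frame 0 has already been recompiled three times), the usual next step is to ask Dynamo which guard failed. A minimal sketch, assuming a recent PyTorch 2.x build; the settings below are real torch APIs, but check them against your version:

    import torch

    # Print guard-failure reasons and full guard trees on each recompile;
    # equivalent to running with TORCH_LOGS="guards,recompiles".
    torch._logging.set_logs(guards=True, recompiles=True)

    # Each frame only keeps a limited number of compiled variants before
    # falling back to eager; raise the budget if adapter swaps or re-scales
    # are expected to keep triggering new guard sets.
    torch._dynamo.config.cache_size_limit = 16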
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_add_out, 140533116482992) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].attn.to_add_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_add_out.training, 140591004393408) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_add_out.lora_A, 140533116482416) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_add_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_add_out.lora_A['default_0'], 140533116470704) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_add_out.lora_A['default_0'].training, 140591004393408) # lora_A = 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_add_out.lora_A['default_0'].weight, 140537325063840) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_add_out.lora_B, 140533116483280) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_add_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_add_out.lora_B['default_0'], 140533116482608) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_add_out.lora_B['default_0'].training, 140591004393408) # lora_B = 
self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_add_out.base_layer, 140581769902496) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_add_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_add_out.lora_dropout, 140533116472240) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_add_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_add_out.lora_dropout['default_0'], 140533116482224) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | 
| | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_add_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.to_add_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].attn.to_add_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].attn.to_add_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.to_add_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].attn.to_add_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_add_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.to_add_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[16].attn.to_add_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.to_add_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.to_add_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: 
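
The subtree above pins down a single PEFT LoRA-wrapped Linear, transformer_blocks[16].attn.to_add_out: ID_MATCH guards on the module objects and their 'default_0' entries, an EQUALS_MATCH on scaling['default_0'] == 1.0, an ID_MATCH on the use_dora flag, a LENGTH_CHECK that merged_adapters is empty, and TENSOR_ALIASING on the _active_adapter object shared with transformer_blocks[0]. A minimal sketch of the forward path these guards protect, reconstructed from the source lines the log cites (peft/tuners/lora/layer.py:557-568) -- paraphrased for illustration, not verbatim PEFT code:

    def lora_linear_forward(self, x, *args, **kwargs):
        # layer.py:557 -- ID_MATCH pins self.base_layer
        result = self.base_layer(x, *args, **kwargs)
        for active_adapter in self.active_adapters:
            # layer.py:560 -- ID_MATCH pins the lora_A ModuleDict
            if active_adapter not in self.lora_A.keys():
                continue
            lora_A = self.lora_A[active_adapter]         # layer.py:562
            lora_B = self.lora_B[active_adapter]         # layer.py:563
            dropout = self.lora_dropout[active_adapter]  # layer.py:564
            scaling = self.scaling[active_adapter]       # layer.py:565 -- EQUALS_MATCH == 1.0
            x = x.to(lora_A.weight.dtype)                # layer.py:566 -- ID_MATCH on the weight
            if not self.use_dora[active_adapter]:        # layer.py:568 -- plain LoRA path, no DoRA
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Every attribute read on this path becomes a guard, which is why a single LoRA Linear contributes over a dozen checks and the full transformer yields a dump of this size.
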
source=L['self'].transformer_blocks[16].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.norm_added_k, 140581769902640) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.norm_added_k.weight, 140581772749264) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | 
| | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.norm_added_q, 140581769902544) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.norm_added_q.weight, 140581772749344) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].attn.processor, 140581769901776) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1, 
accessed_by=DictGetItemGuardAccessor(norm1) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1, 140581769901344) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.norm, 140581769901488) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 
+ scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.silu, 140581769901392) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.linear, 140533116192464) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].norm1.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.linear.lora_A, 140533116193472) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.linear.lora_A['default_0'], 140533115024768) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.linear.lora_A['default_0'].weight, 140526554191392) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[16].norm1.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.linear.lora_B, 140533116193952) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.linear.lora_B['default_0'], 140533115015744) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.linear.base_layer, 140581769901440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[16].norm1.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.linear.lora_dropout, 140533116194192) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.linear.lora_dropout['default_0'], 140533116194096) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].norm1.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].norm1.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].norm1.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].norm1.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].norm1.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].norm1.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[16].norm1.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm2, accessed_by=DictGetItemGuardAccessor(norm2) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | 
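
The same block of guards repeats for norm1.linear, again closing with a TENSOR_ALIASING check that its _active_adapter is the very object held by transformer_blocks[0].norm1.linear -- the adapters share one object, so a single identity test covers both. Rough Python equivalents of the guard kinds appearing in this dump (illustrative semantics only; Dynamo evaluates them inside C++ GuardManagers, and these helper names are invented for the sketch):

    def id_match(obj, expected_id):         # ID_MATCH / ___check_obj_id
        return id(obj) == expected_id
    def type_match(obj, expected_type_id):  # TYPE_MATCH / ___check_type_id
        return id(type(obj)) == expected_type_id
    def equals_match(value, expected):      # EQUALS_MATCH, e.g. attn.heads == 24
        return value == expected
    def dict_length(d, n):                  # DICT_LENGTH, e.g. len(scaling) == 1
        return len(d) == n
    def length_check_falsy(seq):            # LENGTH_CHECK, e.g. "not merged_adapters"
        return not seq
    def tensor_aliasing(a, b):              # TENSOR_ALIASING
        return a is b

The two ids recurring on every .training check (140591004393408 and 140591004393440) are consistent with the CPython True/False singletons: booleans are interned, so an ID_MATCH on a bool attribute is effectively a value guard.
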
| | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm2, 140581769902688) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm2.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context, 140581769903024) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net, 140581769903168) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | 
| | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[16].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0], 140581769903120) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].proj, 140533116087472) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].ff_context.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_A, 140533116076096) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_A['default_0'], 140533116085552) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_A['default_0'].weight, 140537323474832) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_B, 140533116088624) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- 
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_B['default_0'], 140533116080704) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].proj.base_layer, 140581769903216) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_dropout, 140533116087808) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | |
| +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_dropout['default_0'], 140533116081280) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].ff_context.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].ff_context.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | 
| | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].ff_context.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].ff_context.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].ff_context.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].ff_context.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[16].ff_context.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
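Note the EQUALS_MATCH on scaling['default_0'] == 1.0 together with the TYPE_MATCH and DICT_LENGTH on the dict itself: the compiled graph bakes in the concrete LoRA scale and the fact that exactly one adapter is loaded. In peft the scale is lora_alpha / r, so 1.0 here means alpha equals rank. A minimal repro of how a plain Python float read inside a compiled forward turns into exactly this kind of guard (the module below is illustrative, not taken from the log):

    import torch

    class Scaled(torch.nn.Module):
        def __init__(self):
            super().__init__()
            self.scaling = {"default_0": 1.0}  # guarded like proj.scaling above
            self.lin = torch.nn.Linear(8, 8)

        def forward(self, x):
            return self.lin(x) * self.scaling["default_0"]

    m = torch.compile(Scaled())
    x = torch.randn(2, 8)
    m(x)                          # first call compiles, installs EQUALS_MATCH == 1.0
    m.scaling["default_0"] = 0.5  # e.g. what changing lora_scale would do
    m(x)                          # guard fails, frame recompiles

The practical consequence for this pipeline: running it with a different lora_scale would invalidate this guard on every LoRA layer and trigger a recompile.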
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
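The EQUALS_MATCH on approximate == 'tanh' comes from the diffusers GELU wrapper quoted in the comment (diffusers/src/diffusers/models/activations.py:83). Dynamo specializes on the string attribute, so the compiled kernel is specifically the tanh-approximated GELU. A minimal equivalent of what net[0] executes for its gate:

    import torch
    import torch.nn.functional as F

    gate = torch.randn(4)
    out = F.gelu(gate, approximate="tanh")
    # tanh approximation: 0.5 * x * (1 + tanh(sqrt(2/pi) * (x + 0.044715 * x**3)))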
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[1], accessed_by=GetItemGuardAccessor(1)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[1], 140581769903312) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2], accessed_by=GetItemGuardAccessor(2)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[2], 140533116085168) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].ff_context.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[2].lora_A, 140533116076768) # if active_adapter not in self.lora_A.keys(): #
peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[2].lora_A['default_0'], 140533116086032) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[2].lora_A['default_0'].weight, 140526654063696) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | 
+- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[2].lora_B, 140533116088048) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[2].lora_B['default_0'], 140533116086896) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[2].base_layer, 140581769903360) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
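Almost every line in this dump is an ID_MATCH through ___check_obj_id, so it is worth spelling out what that checks. A reasonable mental model (simplified; the real guard runs as a fast path inside the native guard manager) is CPython identity, with the large integer in the log being the object's id() at trace time:

    # Simplified model of the ID_MATCH guards above.
    def check_obj_id(obj, expected_id: int) -> bool:
        return id(obj) == expected_id

    # Consequence: swapping a guarded submodule for a new instance
    # (new id) invalidates the frame, while in-place mutation of the
    # same object passes this particular guard.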
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[2].lora_dropout, 140533116086224) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[2].lora_dropout['default_0'], 140533116076816) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3]
[__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].ff_context.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].ff_context.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].ff_context.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].ff_context.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].ff_context.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].ff_context.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # 
peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[16].ff_context.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].ff_context.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context, 
accessed_by=DictGetItemGuardAccessor(norm1_context)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context, 140581769901536) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.norm, 140581769901728) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training)
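The norm1_context guards walk an AdaLayerNormZero-style module: the comments quote normalization.py:135-139, i.e. a SiLU + Linear projection of the conditioning embedding followed by a scale/shift modulation of a LayerNorm output. A sketch reconstructed from those quoted lines (the dimensions and the exact number of chunks are illustrative assumptions):

    import torch
    import torch.nn as nn

    dim = 16
    silu = nn.SiLU()
    linear = nn.Linear(dim, 5 * dim)
    norm = nn.LayerNorm(dim, elementwise_affine=False)

    x = torch.randn(2, 7, dim)   # encoder hidden states
    emb = torch.randn(2, dim)    # conditioning embedding
    emb = linear(silu(emb))                                        # normalization.py:137
    shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp = emb.chunk(5, dim=1)
    x = norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]    # normalization.py:139

Note that `linear` here is the LoRA-wrapped layer in the real model, which is why the subtree below repeats the full lora_A/lora_B guard pattern yet again.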
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.silu, 140581769901632) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.linear, 140533115023712) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[16].norm1_context.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.linear.lora_A, 140533115013968) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.linear.lora_A['default_0'], 140533115014112) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[16].norm1_context.linear.lora_A['default_0'].weight, 140526787430016) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.linear.lora_B, 140533115017328) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.linear.lora_B['default_0'], 140533115012240) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.linear.base_layer, 140581769901680) # result = self.base_layer(x, *args, **kwargs) # 
peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.linear.lora_dropout, 140533115014784) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.linear.lora_dropout['default_0'], 140533115013920) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | 
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].norm1_context.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].norm1_context.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[16].norm1_context.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].norm1_context.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[16].norm1_context.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear._backward_hooks, 
accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[16].norm1_context.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[16].norm1_context.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm1_context.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[16].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
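The merged/disable/active_adapter guards all quote the same three peft properties (peft/tuners/tuners_utils.py:506, 511, 516), and the TENSOR_ALIASING lines record that this layer holds the very same _active_adapter object as transformer_blocks[0].norm1.linear. A simplified sketch of the state being checked, reconstructed from the quoted lines (the class name and constructor are illustrative, and _disable_adapters is shown as False because the LoRA branches are being traced):

    class TunerLayerSketch:
        def __init__(self, active_adapter):
            self.merged_adapters = []              # LENGTH_CHECK: empty, so not merged
            self._disable_adapters = False         # ID_MATCH against a bool singleton
            self._active_adapter = active_adapter  # one shared object across layers

        @property
        def merged(self) -> bool:
            return bool(self.merged_adapters)      # tuners_utils.py:506

        @property
        def disable_adapters(self) -> bool:
            return self._disable_adapters          # tuners_utils.py:511

        @property
        def active_adapter(self):
            return self._active_adapter            # tuners_utils.py:516

    # Sharing one object across layers reproduces the aliasing the guards record:
    shared = ["default_0"]
    a, b = TunerLayerSketch(shared), TunerLayerSketch(shared)
    assert a.active_adapter is b.active_adapter

Aliasing guards like this let Dynamo verify the shared object once instead of re-deriving it independently for every layer.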
[__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm2_context, 140581769902736) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[16].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[16]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=L['self'].transformer_blocks[17], accessed_by=GetItemGuardAccessor(17) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17], 140581769901008) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | 
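
Every LoRA-wrapped Linear above contributes the same cluster of guards because Dynamo traced the dict lookups in peft's LoRA forward. The sketch below is reconstructed only from the source lines the guards themselves quote (peft/tuners/lora/layer.py:557-568); the real method also restores the result dtype and has a DoRA branch, both omitted here. The mapping to guard kinds is noted inline.

    # minimal sketch of peft's lora.Linear.forward, assembled from the lines
    # quoted in the guard comments; not a verbatim copy of the library code
    def forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)       # layer.py:557 -> ID_MATCH on base_layer
        for active_adapter in self.active_adapters:        # property guarded via TENSOR_ALIASING
            if active_adapter not in self.lora_A.keys():   # layer.py:560 -> ID_MATCH on lora_A
                continue
            lora_A = self.lora_A[active_adapter]           # layer.py:562 -> ID_MATCH per entry
            lora_B = self.lora_B[active_adapter]           # layer.py:563
            dropout = self.lora_dropout[active_adapter]    # layer.py:564
            scaling = self.scaling[active_adapter]         # layer.py:565 -> TYPE_MATCH/DICT_LENGTH/EQUALS_MATCH
            x = x.to(lora_A.weight.dtype)                  # layer.py:566 -> ID_MATCH on the weight tensor
            if not self.use_dora[active_adapter]:          # layer.py:568 -> ID_MATCH pins use_dora['default_0']
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Each plain-dict access becomes a TYPE_MATCH plus DICT_LENGTH on the dict and a guard on its 'default_0' entry, which is why a single adapter lookup fans out into half a dozen guards per wrapped layer.
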
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff, accessed_by=DictGetItemGuardAccessor(ff)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff, 140581770183248) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net, accessed_by=DictGetItemGuardAccessor(net)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net, 140581770183536) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[17].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0], accessed_by=GetItemGuardAccessor(0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0], 140581770183488) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].proj, 140533114647552) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
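
The recurring DICT_CONTAINS: not ___dict_contains('forward', ...) guards come from nn.Module._call_impl (module.py:1556, quoted above): an instance attribute named forward would shadow the class method, so the compiled graph is only valid while no such override exists. The training flags are guarded by ID_MATCH because True and False are singletons, making an identity check the cheapest possible test. A small self-contained illustration of the failure mode the DICT_CONTAINS guard protects against:

    import torch

    class M(torch.nn.Module):
        def forward(self, x):
            return x + 1

    m = M()
    compiled = torch.compile(m)
    compiled(torch.ones(2))       # compiles; guards that 'forward' is not in m.__dict__
    m.forward = lambda x: x - 1   # the instance dict now shadows the class method
    compiled(torch.ones(2))       # guard fails -> recompile instead of running stale code
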
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].ff.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].proj.lora_A, 140533114645008) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].proj.lora_A['default_0'], 140533114634352) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].proj.lora_A['default_0'].weight, 140526668629152) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].proj.lora_B, 140533114641744) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].proj.lora_B['default_0'], 140533114648416) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].proj.base_layer, 140581770183584) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].proj.lora_dropout, 140533114641600) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].proj.lora_dropout['default_0'], 140533114640304) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].ff.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].ff.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].ff.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].ff.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].ff.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].ff.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[17].ff.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[1], accessed_by=GetItemGuardAccessor(1)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[1], 140581770183632) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2], accessed_by=GetItemGuardAccessor(2)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[2], 140533114641024) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].ff.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[2].lora_A, 140533114648128) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
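
The guards on ff.net pin the container and every element of the loop `for module in self.net:` (attention.py:1200): a LENGTH_CHECK that len(net) == 3, an ID_MATCH per element, and per-module attribute guards. Reading the guarded attributes back, net[0] is a GELU projection whose proj is itself LoRA-wrapped and whose approximate flag is guarded to 'tanh' (activations.py:83/88), net[1] carries only a training guard (consistent with a parameter-free module such as Dropout), and net[2] is another LoRA-wrapped Linear. A hedged sketch of that stack; the class shape follows the quoted source lines, while the dimensions are invented for illustration:

    import torch.nn as nn
    import torch.nn.functional as F

    class GELU(nn.Module):
        # sketch of the activation wrapper the guards describe
        def __init__(self, dim_in, dim_out, approximate="none"):
            super().__init__()
            self.proj = nn.Linear(dim_in, dim_out)   # LoRA-wrapped in the guarded model
            self.approximate = approximate           # EQUALS_MATCH == 'tanh' above

        def gelu(self, gate):
            return F.gelu(gate, approximate=self.approximate)  # activations.py:83

        def forward(self, hidden_states):
            hidden_states = self.proj(hidden_states)           # activations.py:88
            return self.gelu(hidden_states)

    # the ff.net the guards walk: LENGTH_CHECK len(...) == 3
    net = nn.ModuleList([GELU(3072, 12288, approximate="tanh"),
                         nn.Dropout(0.0),
                         nn.Linear(12288, 3072)])
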
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[2].lora_A['default_0'], 140533116581968) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[2].lora_A['default_0'].weight, 140533119424832) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[2].lora_B, 140533114642080) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[2].lora_B['default_0'], 140533116575200) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[2].base_layer, 140581770183680) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[2].lora_dropout, 140533114634832) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[2].lora_dropout['default_0'], 140533114635936) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].ff.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].ff.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].ff.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
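
Most guards here check object identity, which survives anything short of swapping modules out, but EQUALS_MATCH: scaling['default_0'] == 1.0 watches a plain Python float and is therefore the easiest one to invalidate: rescaling the adapter changes the value and throws the compiled graph away. Note also the [0/3] tag on every record; this guard set belongs to the fourth compilation of frame 0, where the log opened at [0/0]. An illustration of the interaction, with pipe as a hypothetical already-loaded diffusers pipeline:

    # illustration only: how rescaling a LoRA adapter trips the EQUALS_MATCH guard
    import torch

    prompt = "a photo"                                 # placeholder input
    pipe.transformer = torch.compile(pipe.transformer)
    _ = pipe(prompt)                                   # compiles; guards scaling['default_0'] == 1.0
    pipe.set_adapters(["default_0"], adapter_weights=[0.7])  # scaling entry becomes 0.7
    _ = pipe(prompt)                                   # guard fails -> another recompile
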
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].ff.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].ff.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].ff.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[17].ff.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn, accessed_by=DictGetItemGuardAccessor(attn)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn, 140581769903888) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_k, 140533115173712) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_k.lora_A, 140533115164544) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_k.lora_A['default_0'], 140533115166224) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_k.lora_A['default_0'].weight, 140537313187920) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_k.lora_B, 140533115165024) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_k.lora_B['default_0'], 140533115168000) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_k.base_layer, 140581770182720) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_k.lora_dropout, 140533115159552) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
[0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_k.lora_dropout['default_0'], 140533115158832) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[17].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_q, 140533115166656) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_q.lora_A, 140533115168576) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_q.lora_A['default_0'], 140533115161184) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_q.lora_A['default_0'].weight, 140537313196720) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_q.lora_B, 140533115162000) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_q.lora_B['default_0'], 140533115161424) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_q.base_layer, 140581769904032) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_q.lora_dropout, 140533115167136) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_q.lora_dropout['default_0'], 140533115166416) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].attn.to_q.scaling) == 1 # scaling = 
self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[17].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) 
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_v, 140533115969664) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[17].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_v.lora_A, 140533115216816) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_v.lora_A['default_0'], 140533115210768) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[17].attn.to_v.lora_A['default_0'].weight, 140537313187040) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_v.lora_B, 140533115211296) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_v.lora_B['default_0'], 140533115221424) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_v.base_layer, 140581770182816) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | 
| +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_v.lora_dropout, 140533115210288) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_v.lora_dropout['default_0'], 140533115210720) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: 
___check_type_id(L['self'].transformer_blocks[17].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[17].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.norm_k, 140581769904080) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.norm_k.weight, 140581772778192) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.norm_q, 140581769903984) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[17].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.norm_q.weight, 140581772741744) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out, 
accessed_by=DictGetItemGuardAccessor(to_out) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out, 140581770183008) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[0], 140533114648848) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].attn.to_out[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[0].training, 140591004393408) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[0].lora_A, 140533114636176) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[0].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[0].lora_A['default_0'], 140533114642320) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[0].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[0].lora_A['default_0'].weight, 140526687441984) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[0].lora_B, 140533114649088) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[0].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[0].lora_B['default_0'], 140533114648944) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[0].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[0].base_layer, 140581770183056) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[17].attn.to_out[0].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[0].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[0].lora_dropout, 140533114636128) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[0].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[0].lora_dropout['default_0'], 140533114639776) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[0].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].scaling, 
accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.to_out[0].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].attn.to_out[0].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].attn.to_out[0].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.to_out[0].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].attn.to_out[0].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[0].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: 
___check_type_id(L['self'].transformer_blocks[17].attn.to_out[0].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[17].attn.to_out[0].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[0]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[0]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[1], accessed_by=GetItemGuardAccessor(1) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[1], 140581770183104) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_k_proj, 140533115222528) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].attn.add_k_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_k_proj.training, 140591004393408) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_k_proj.lora_A, 140533115209952) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[17].attn.add_k_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_k_proj.lora_A['default_0'], 140533115223344) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_k_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_k_proj.lora_A['default_0'].weight, 140526768499632) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_k_proj.lora_B, 140533115218400) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_k_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_k_proj.lora_B['default_0'], 140533115217728) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_k_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_k_proj.base_layer, 140581770182864) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_k_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_k_proj.lora_dropout, 
140533115218592) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_k_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_k_proj.lora_dropout['default_0'], 140533115222240) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_k_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.add_k_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].attn.add_k_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].attn.add_k_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.add_k_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].attn.add_k_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_k_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.add_k_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[17].attn.add_k_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_k_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 
in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_k_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_q_proj, 140533115219600) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].attn.add_q_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_q_proj.training, 140591004393408) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_q_proj.lora_A, 140533115221664) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_q_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_q_proj.lora_A['default_0'], 140533114649040) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_q_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_q_proj.lora_A['default_0'].weight, 140526768504512) # x = 
x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_q_proj.lora_B, 140533115210528) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_q_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_q_proj.lora_B['default_0'], 140533114648032) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_q_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_q_proj.base_layer, 140581770182960) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[17].attn.add_q_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_q_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_q_proj.lora_dropout, 140533115222432) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_q_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_q_proj.lora_dropout['default_0'], 140533115208032) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_q_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # 
peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.add_q_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].attn.add_q_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].attn.add_q_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.add_q_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].attn.add_q_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_q_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj.merged_adapters, 
accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.add_q_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[17].attn.add_q_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_q_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_q_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_v_proj, 140533115208368) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_v_proj, 140533115208368) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].attn.add_v_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_v_proj.training, 140591004393408) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_v_proj.lora_A, 140533115216624) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_v_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_v_proj.lora_A['default_0'], 140533115218544) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_v_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_v_proj.lora_A['default_0'].weight, 140526768490592) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_v_proj.lora_B, 140533115211440) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_v_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_v_proj.lora_B['default_0'], 140533115223248) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_v_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_v_proj.base_layer, 140581770182912) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_v_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_v_proj.lora_dropout, 140533115222864) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_v_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_v_proj.lora_dropout['default_0'], 140533115223728) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_v_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.add_v_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].attn.add_v_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].attn.add_v_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.add_v_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].attn.add_v_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_v_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.add_v_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[17].attn.add_v_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.add_v_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.add_v_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
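Every add_v_proj guard above maps one-to-one onto an attribute read in PEFT's LoRA Linear.forward. A sketch assembled from the peft/tuners/lora/layer.py:557-568 fragments quoted in the guard comments; the adapter loop and the final return are assumed glue, not quoted source:

```python
# Traced PEFT LoRA forward path, assembled from the layer.py fragments
# quoted in the guards above; lines marked "assumed" are illustration.
def forward(self, x, *args, **kwargs):
    result = self.base_layer(x, *args, **kwargs)      # layer.py:557
    for active_adapter in self.active_adapters:       # assumed iteration
        if active_adapter not in self.lora_A.keys():  # layer.py:560
            continue
        lora_A = self.lora_A[active_adapter]          # layer.py:562
        lora_B = self.lora_B[active_adapter]          # layer.py:563
        dropout = self.lora_dropout[active_adapter]   # layer.py:564
        scaling = self.scaling[active_adapter]        # layer.py:565
        x = x.to(lora_A.weight.dtype)                 # layer.py:566
        if not self.use_dora[active_adapter]:         # layer.py:568
            result = result + lora_B(lora_A(dropout(x))) * scaling  # assumed
    return result                                     # assumed
```

Each dictionary access in that path (lora_A, lora_B, lora_dropout, scaling, use_dora, all keyed by 'default_0') becomes its own ID_MATCH / DICT_LENGTH / EQUALS_MATCH guard, so a single LoRA-wrapped Linear contributes a couple of dozen guards, and the same block repeats for every adapted layer in the transformer.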
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_add_out, 140533114647168) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].attn.to_add_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_add_out.training, 140591004393408) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_add_out.lora_A, 140533114644720) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_add_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_add_out.lora_A['default_0'], 140533114646592) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_add_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_add_out.lora_A['default_0'].weight, 140526668642352) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_add_out.lora_B, 140533114639920) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_add_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_add_out.lora_B['default_0'], 140533114642224) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_add_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_add_out.base_layer, 140581770183152) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_add_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_add_out.lora_dropout, 140533114648320) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_add_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_add_out.lora_dropout['default_0'], 140533114641984) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_add_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.to_add_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].attn.to_add_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].attn.to_add_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.to_add_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].attn.to_add_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_add_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.to_add_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[17].attn.to_add_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.to_add_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.to_add_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
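For reading the rest of this dump, the guard primitives reduce to simple Python predicates. A rough mapping, offered as an approximation for log-reading only (Dynamo's real implementation lives in C++ guard managers):

```python
# Approximate semantics of the guard kinds seen in this dump.
def ID_MATCH(value, expected_id):          # ___check_obj_id
    return id(value) == expected_id        # exact object identity

def TYPE_MATCH(value, expected_type_id):   # ___check_type_id
    return id(type(value)) == expected_type_id  # exact type (e.g. dict, list)

def EQUALS_MATCH(value, expected):         # specialize on a constant
    return value == expected               # e.g. heads == 24, eps == 1e-06

def LENGTH_CHECK(value):                   # here: "not merged_adapters"
    return len(value) == 0

def TENSOR_ALIASING(a, b):                 # two guarded sources, one object
    return a is b
```

Two details worth noting: the ids 140591004393408 and 140591004393440 recur throughout because they are almost certainly the two bool singletons in this process (so every .training, use_dora['default_0'], and _disable_adapters check is an identity test against True or False), and TENSOR_ALIASING, despite its name, is used here to assert that each layer's _active_adapter is literally the same object as transformer_blocks[0].norm1.linear._active_adapter.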
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.norm_added_k, 140581770183296) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.norm_added_k.weight, 140581772742144) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.norm_added_q, 140581770183200) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.norm_added_q.weight, 140581772745504) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
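The norm_added_k / norm_added_q guards pin the eps float and the optional weight of diffusers' RMSNorm. A sketch built around the normalization.py:428-430 lines quoted above; the variance line is the standard RMSNorm definition and is assumed rather than quoted in the log:

```python
# Sketch of the diffusers RMSNorm forward behind the eps/weight guards.
import torch

def rms_norm_forward(self, hidden_states):
    variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)  # assumed
    hidden_states = hidden_states * torch.rsqrt(variance + self.eps)  # normalization.py:428
    if self.weight is not None:                                       # normalization.py:430
        hidden_states = hidden_states * self.weight                   # assumed application
    return hidden_states
```

Since eps is a plain Python float attribute, it is baked into the compiled graph and protected by the EQUALS_MATCH == 1e-06 guard; changing it after compilation would invalidate this cache entry.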
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].attn.processor, 140581769903840) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
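The EQUALS_MATCH guards just above (attn.heads == 24, and earlier scaling['default_0'] == 1.0) show that plain Python int/float attributes are treated as compile-time constants. A minimal, hypothetical repro of that specialization behavior, unrelated to the Flux model itself:

```python
# Hypothetical demo: a plain int attribute is constant-folded and guarded,
# so mutating it causes a guard miss and a recompile of the frame
# (observable with TORCH_LOGS="recompiles").
import torch

class Head(torch.nn.Module):
    def __init__(self):
        super().__init__()
        self.heads = 24  # guarded with EQUALS_MATCH, like attn.heads above

    def forward(self, x):
        return x.reshape(x.shape[0], -1, self.heads)

base = Head()
compiled = torch.compile(base)
compiled(torch.randn(2, 48, 24))  # first call: trace and install guards
base.heads = 12                   # EQUALS_MATCH on heads now fails
compiled(torch.randn(2, 48, 12))  # guard miss -> Dynamo recompiles the frame
```

Relatedly, the [0/3] tag on every line of this dump identifies the fourth guard set compiled for frame 0 (the compile at [0/0] appears earlier in this log), so at least three guard misses of this kind already occurred during this run.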
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1, accessed_by=DictGetItemGuardAccessor(norm1)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1, 140581769903408) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.norm, 140581769903552) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.silu, 140581769903456) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.linear, 140533116086656) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].norm1.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward
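The norm1 guards trace diffusers' AdaLayerNormZero. A sketch assembled from the normalization.py:135-139 lines quoted in the guards; the 6-way chunk is inferred from the five values returned at transformer_flux.py:165 and is not itself quoted in the log:

```python
# Sketch of the AdaLayerNormZero path behind the norm1 guards.
def ada_layer_norm_zero_forward(self, x, emb):
    if self.emb is not None:   # normalization.py:135; the ID_MATCH above pins
        emb = self.emb(emb)    # self.emb, most likely to None here (call shape assumed)
    emb = self.linear(self.silu(emb))                 # normalization.py:137
    shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = \
        emb.chunk(6, dim=1)                           # inferred split
    x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # normalization.py:139
    return x, gate_msa, shift_mlp, scale_mlp, gate_mlp
```

Note that norm1.linear is itself LoRA-wrapped (the guards below walk into its lora_A/lora_B), so even the modulation projections carry the full PEFT guard block.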
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.linear.lora_A, 140533115082912) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.linear.lora_A['default_0'], 140533115079120) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.linear.lora_A['default_0'].weight, 140537313194960) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.linear.lora_B, 140533115077344) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.linear.lora_B['default_0'], 140533115087328) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.linear.base_layer, 140581769903504) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.linear.lora_dropout, 140533115077728) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.linear.lora_dropout['default_0'], 140533115091888) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].norm1.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].norm1.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].norm1.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
len(L['self'].transformer_blocks[17].norm1.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].norm1.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[17].norm1.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: 
L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm2, accessed_by=DictGetItemGuardAccessor(norm2) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm2, 140581770183344) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm2.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context, 140581770183728) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[17].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net, 140581770183872) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[17].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0], 140581770183824) # for module 
in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].proj, 140533116581776) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].ff_context.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_A, 140533116571312) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_A['default_0'], 140533114781168) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_A['default_0'].weight, 140533119419152) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_B, 140533116581920) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_B['default_0'], 140533114767488) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | 
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].proj.base_layer, 140581770183920) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_dropout, 140533116577504) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_dropout['default_0'], 140533116574576) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].ff_context.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].ff_context.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].ff_context.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].ff_context.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].ff_context.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].ff_context.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[17].ff_context.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[1], 140581770184016) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[2], 140533114776752) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[17].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].ff_context.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[2].lora_A, 140533114776608) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[2].lora_A['default_0'], 140533114767344) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[2].lora_A['default_0'].weight, 140533119420032) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[2].lora_B, 140533114772576) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[2].lora_B['default_0'], 140533114780304) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[17].ff_context.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[2].base_layer, 140581770184064) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[2].lora_dropout, 140533114775648) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 
in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[2].lora_dropout['default_0'], 140533114775168) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].ff_context.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].ff_context.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].ff_context.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].ff_context.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | 
| | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].ff_context.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].ff_context.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[17].ff_context.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].ff_context.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context, 140581769903600) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[17].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.norm, 140581769903792) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.silu, 140581769903696) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[17].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.linear, 140533115077008) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[17].norm1_context.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.linear.lora_A, 140533115090400) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
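
The norm1_context subtree follows the adaLN-Zero pattern exactly as the inline source comments show: the conditioning embedding goes through silu and then a (LoRA-wrapped) linear, and the normalized stream is modulated with `(1 + scale)` and `shift` (normalization.py:135-139). A condensed sketch of that pattern, with dimension names assumed:

```python
import torch
import torch.nn as nn

class AdaLayerNormZeroSketch(nn.Module):
    """Condensed sketch of the adaLN-Zero pattern guarded above:
    emb = linear(silu(emb)); x = norm(x) * (1 + scale) + shift."""

    def __init__(self, dim: int):
        super().__init__()
        self.silu = nn.SiLU()
        self.linear = nn.Linear(dim, 6 * dim)   # shift/scale/gate for msa and mlp
        self.norm = nn.LayerNorm(dim, elementwise_affine=False, eps=1e-6)

    def forward(self, x, emb):
        emb = self.linear(self.silu(emb))        # normalization.py:137
        (shift_msa, scale_msa, gate_msa,
         shift_mlp, scale_mlp, gate_mlp) = emb.chunk(6, dim=1)
        # normalization.py:139
        x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]
        return x, gate_msa, shift_mlp, scale_mlp, gate_mlp

x = torch.randn(2, 16, 64)
emb = torch.randn(2, 64)
out, *mods = AdaLayerNormZeroSketch(64)(x, emb)
```

Because `self.linear` here is the LoRA-wrapped module, the guard tree recurses from the norm straight into the full peft layer state that follows.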
[__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.linear.lora_A['default_0'], 140533115076912) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.linear.lora_A['default_0'].weight, 140537313191120) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.linear.lora_B, 140533115077680) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[17].norm1_context.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.linear.lora_B['default_0'], 140533115078784) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.linear.base_layer, 140581769903744) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.linear.lora_dropout, 140533115077056) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.lora_dropout.__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.linear.lora_dropout['default_0'], 140533115076720) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].norm1_context.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].norm1_context.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[17].norm1_context.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 
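
The inline comments above trace straight through peft's LoRA Linear forward (peft/tuners/lora/layer.py:557-568): run the base layer, look up `lora_A`, `lora_B`, `dropout` and `scaling` for the active adapter, cast the input to lora_A's weight dtype, and take the non-DoRA branch. A minimal sketch of that path for a single adapter; the final accumulation line is paraphrased from peft's non-DoRA branch and elides its dtype bookkeeping:

```python
import torch
import torch.nn as nn

class LoraLinearSketch(nn.Module):
    """Minimal sketch of the peft LoRA Linear forward traced by these guards.
    Single adapter, no DoRA, nothing merged into the base weight."""

    def __init__(self, base: nn.Linear, r: int = 16, lora_alpha: int = 16):
        super().__init__()
        self.base_layer = base
        self.lora_A = nn.ModuleDict({"default_0": nn.Linear(base.in_features, r, bias=False)})
        self.lora_B = nn.ModuleDict({"default_0": nn.Linear(r, base.out_features, bias=False)})
        self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})  # stand-in
        self.scaling = {"default_0": lora_alpha / r}  # EQUALS_MATCH'd as 1.0 above
        self.use_dora = {"default_0": False}          # ID_MATCH'd against False
        self.merged_adapters = []                     # LENGTH_CHECK: must stay empty
        self._active_adapter = ["default_0"]

    def forward(self, x):
        result = self.base_layer(x)                       # layer.py:557
        for active_adapter in self._active_adapter:
            if active_adapter not in self.lora_A.keys():  # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]          # layer.py:562
            lora_B = self.lora_B[active_adapter]          # layer.py:563
            dropout = self.lora_dropout[active_adapter]   # layer.py:564
            scaling = self.scaling[active_adapter]        # layer.py:565
            x = x.to(lora_A.weight.dtype)                 # layer.py:566
            if not self.use_dora[active_adapter]:         # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result
```

The LENGTH_CHECK on `merged_adapters` and the ID_MATCH on `_disable_adapters` pin exactly this branch: adapters enabled, nothing merged into the base weight.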
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].norm1_context.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[17].norm1_context.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[17].norm1_context.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[17].norm1_context.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm1_context.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[17].norm1_context.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[17].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[17].norm2_context, 140581770183392) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[17].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[17]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=L['self'].transformer_blocks[18], accessed_by=GetItemGuardAccessor(18) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18], 140581769903072) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].training, 140591004393440) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff, accessed_by=DictGetItemGuardAccessor(ff) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff, 140581770185360) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
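
The ID_MATCH on transformer_blocks[18] attributed to the `enumerate(self.transformer_blocks)` line (transformer_flux.py:471) is the telltale of loop unrolling: Dynamo traces the Python for loop to completion, so every block contributes its own copy of this guard subtree, and replacing any single module instance invalidates the whole graph. A small repro sketch; whether a recompile actually fires can be confirmed with `TORCH_LOGS=recompiles`:

```python
import torch
import torch.nn as nn

class Stack(nn.Module):
    def __init__(self):
        super().__init__()
        self.blocks = nn.ModuleList(nn.Linear(8, 8) for _ in range(3))

    def forward(self, x):
        for block in self.blocks:   # unrolled by Dynamo: one ID_MATCH per block
            x = block(x)
        return x

m = Stack()
compiled = torch.compile(m)
x = torch.randn(2, 8)
compiled(x)                         # trace + compile, guards recorded per block
m.blocks[1] = nn.Linear(8, 8)       # new object id -> that block's ID_MATCH fails
compiled(x)                         # guard miss: Dynamo recompiles the frame
```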
[__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].ff.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.training, 140591004393440) # ff_output = self.ff(norm_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:185 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net, 140581770185600) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].ff.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[18].ff.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0], 140581770185552) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].ff.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].proj, 140533115619120) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].ff.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.lora_A, 
accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].proj.lora_A, 140533116661056) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].proj.lora_A['default_0'], 140533115291056) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].proj.lora_A['default_0'].weight, 140526776356992) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].proj.lora_B, 140533116657360) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].proj.lora_B['default_0'], 140533115290576) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].proj.base_layer, 140581770185648) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[18].ff.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].proj.lora_dropout, 140533116662976) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].proj.lora_dropout['default_0'], 140533116650928) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].proj.lora_dropout['default_0'].training, 
140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].ff.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].ff.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].ff.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].ff.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].ff.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | 
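
Note that `scaling['default_0']` is pinned by EQUALS_MATCH on the float itself rather than by identity. In peft, scaling is `lora_alpha / r` (or `lora_alpha / sqrt(r)` under rslora), so the repeated `== 1.0` here suggests a checkpoint with `lora_alpha == r`; anything that rewrites the effective adapter scale at runtime, for example `set_adapters` with different weights, flips this guard and forces a recompile. A sanity-check sketch:

```python
# scaling as peft computes it; the guard pins the resulting float, so any
# runtime change to the adapter scale invalidates the compiled graph.
import math

def lora_scaling(lora_alpha: float, r: int, use_rslora: bool = False) -> float:
    return lora_alpha / (math.sqrt(r) if use_rslora else r)

assert lora_scaling(16, 16) == 1.0   # matches the EQUALS_MATCH ... == 1.0 above
```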
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].ff.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[18].ff.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].ff.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].ff.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[18].ff.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[1], 140581770185696) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[2], 140533115290672) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].ff.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] 
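
The ff.net guards describe diffusers' three-entry FeedForward, matching the LENGTH_CHECK `== 3` above: a GELU projection with `approximate == 'tanh'` (activations.py:83-88), a dropout whose only guard is its training flag, and an output Linear that is again LoRA-wrapped; forward simply iterates `self.net` (attention.py:1200). A runnable sketch of that stack, with sizes assumed:

```python
import torch
import torch.nn as nn
import torch.nn.functional as F

class GELUProj(nn.Module):
    """diffusers-style GELU block: project, then tanh-approximated gelu."""
    def __init__(self, dim_in: int, dim_out: int, approximate: str = "tanh"):
        super().__init__()
        self.proj = nn.Linear(dim_in, dim_out)
        self.approximate = approximate           # EQUALS_MATCH'd as 'tanh' above

    def forward(self, hidden_states):
        hidden_states = self.proj(hidden_states)                 # activations.py:88
        return F.gelu(hidden_states, approximate=self.approximate)  # :83

class FeedForwardSketch(nn.Module):
    """Three-entry net: GELU proj, dropout, output Linear. In the log the
    two Linears are the ones wrapped by peft LoRA layers."""
    def __init__(self, dim: int, mult: int = 4, dropout: float = 0.0):
        super().__init__()
        self.net = nn.ModuleList([GELUProj(dim, dim * mult),
                                  nn.Dropout(dropout),
                                  nn.Linear(dim * mult, dim)])

    def forward(self, hidden_states):
        for module in self.net:                  # attention.py:1200
            hidden_states = module(hidden_states)
        return hidden_states

y = FeedForwardSketch(64)(torch.randn(2, 16, 64))
```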
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[2].lora_A, 140533115295280) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[2].lora_A['default_0'], 140533115291680) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[2].lora_A['default_0'].weight, 140526684481760) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[2].lora_B, 140533115304544) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[2].lora_B['default_0'], 140533115295424) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] 
[0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[2].base_layer, 140581770185744) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[2].lora_dropout, 140533115290912) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[2].lora_dropout['default_0'], 140533115294512) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) 
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].ff.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].ff.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].ff.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].ff.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].ff.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2]._backward_hooks, 
accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].ff.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[18].ff.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].ff.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff._forward_pre_hooks, 
accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn, 140581770184592) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.training, 140591004393440) # attn_output, context_attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:172 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_k, 140533114776656) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_k.lora_A, 140533115039808) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_k.lora_A['default_0'], 140533115033280) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_k.lora_A['default_0'].weight, 140526776364272) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_k.lora_B, 140533115041776) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_k.lora_B['default_0'], 140533115043312) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | 
| | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_k.base_layer, 140581770184736) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_k.lora_dropout, 140533115030592) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_k.lora_dropout['default_0'], 140533115043072) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[18].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_q, 140533114719632) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_q.lora_A, 140533114730624) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_q.lora_A['default_0'], 140533114749808) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_q.lora_A['default_0'].weight, 140526558355648) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_q.lora_B, 140533114717952) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_q.lora_B['default_0'], 140533114764544) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) 
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_q.base_layer, 140581770184832) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_q.lora_dropout, 140533114719440) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_q.lora_dropout['default_0'], 140533114718960) # dropout = 
self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_q.use_dora['default_0'], 
140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[18].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[18].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_v, 140533115043792) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_v.lora_A, 140533115032512) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_v.lora_A['default_0'], 140533115041440) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_v.lora_A['default_0'].weight, 140526776354032) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_v.lora_B, 140533115034048) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_v.lora_B['default_0'], 140533115031840) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_v.base_layer, 140581770184928) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_v.lora_dropout, 140533115030208) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_v.lora_dropout['default_0'], 140533115040864) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[18].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.norm_k, 140581770184784) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.norm_k.weight, 140581772745904) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.norm_q, 140581770184688) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.norm_q.weight, 140581772745664) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out, accessed_by=DictGetItemGuardAccessor(to_out)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out, 140581770185120) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out.training, 140591004393440) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0], accessed_by=GetItemGuardAccessor(0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[0], 140533115630112) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].attn.to_out[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[0].training, 140591004393408) # hidden_states = attn.to_out[0](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1776 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[0].lora_A, 140533115624064) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[0].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[0].lora_A['default_0'], 140533115628384) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[0].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[0].lora_A['default_0'].weight, 140526776359472) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[0].lora_B, 140533115627904) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[0].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[0].lora_B['default_0'], 140533115628672) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[0].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[0].base_layer, 140581770185168) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[0].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[0].lora_dropout, 140533115618352) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[0].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[0].lora_dropout['default_0'], 140533115623440) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[0].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.to_out[0].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].attn.to_out[0].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].attn.to_out[0].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.to_out[0].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].attn.to_out[0].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[0].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.to_out[0].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[18].attn.to_out[0].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0]._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[0]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[0]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.to_out[0]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[1], accessed_by=GetItemGuardAccessor(1)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[1], 140581770185216) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[1].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_out[1].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_out[1].training, 140591004393440) # hidden_states = attn.to_out[1](hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1778 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj, accessed_by=DictGetItemGuardAccessor(add_k_proj)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_k_proj, 140533115040576) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].attn.add_k_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_k_proj.training, 140591004393408) # encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1736 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_k_proj.lora_A, 140533115041488) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_k_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_k_proj.lora_A['default_0'], 140533115038896) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_k_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_k_proj.lora_A['default_0'].weight, 140526776364592) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_k_proj.lora_B, 140533115027760) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_k_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_k_proj.lora_B['default_0'], 140533115040816) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_k_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_k_proj.base_layer, 140581770184976) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_k_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_k_proj.lora_dropout, 140533115031888) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_k_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_k_proj.lora_dropout['default_0'], 140533115041680) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_k_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.add_k_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].attn.add_k_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].attn.add_k_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.add_k_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].attn.add_k_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_k_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.add_k_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[18].attn.add_k_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_k_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_k_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.add_k_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj, accessed_by=DictGetItemGuardAccessor(add_q_proj)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_q_proj, 140533115630256) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].attn.add_q_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_q_proj.training, 140591004393408) # encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1735 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_q_proj.lora_A, 140533115629872) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_q_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_q_proj.lora_A['default_0'], 140533115617728) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | |
+- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_q_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_q_proj.lora_A['default_0'].weight, 140526776354112) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_q_proj.lora_B, 140533115630784) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_q_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_q_proj.lora_B['default_0'], 140533115632176) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_q_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_q_proj.base_layer, 140581770185072) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_q_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_q_proj.lora_dropout, 140533115627664) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_q_proj.lora_dropout.training, 140591004393408) # dropout = 
self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_q_proj.lora_dropout['default_0'], 140533115633520) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_q_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.add_q_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].attn.add_q_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].attn.add_q_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.add_q_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | 
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].attn.add_q_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_q_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.add_q_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[18].attn.add_q_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_q_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_q_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | 
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.add_q_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj, accessed_by=DictGetItemGuardAccessor(add_v_proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_v_proj, 140533115033328) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].attn.add_v_proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_v_proj.training, 140591004393408) # encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1737 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_v_proj.lora_A, 140533115629008) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[18].attn.add_v_proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_v_proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_v_proj.lora_A['default_0'], 140533115630016) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_v_proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_v_proj.lora_A['default_0'].weight, 140526776358432) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_v_proj.lora_B, 140533115630736) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_v_proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_v_proj.lora_B['default_0'], 140533115630592) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_v_proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_v_proj.base_layer, 140581770185024) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_v_proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.lora_dropout, 
accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_v_proj.lora_dropout, 140533115041584) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_v_proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_v_proj.lora_dropout['default_0'], 140533115038752) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_v_proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.add_v_proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].attn.add_v_proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].attn.add_v_proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.add_v_proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].attn.add_v_proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_v_proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.add_v_proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[18].attn.add_v_proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.add_v_proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.add_v_proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.add_v_proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out, accessed_by=DictGetItemGuardAccessor(to_add_out) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_add_out, 140533115628960) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].attn.to_add_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_add_out.training, 140591004393408) # encoder_hidden_states = attn.to_add_out(encoder_hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1779 in __call__ V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_add_out.lora_A, 140533115626656) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_add_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_add_out.lora_A['default_0'], 140533115627328) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_add_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_add_out.lora_A['default_0'].weight, 140526776366272) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_add_out.lora_B, 140533115628816) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_add_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_add_out.lora_B['default_0'], 140533115629152) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_add_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_add_out.base_layer, 
140581770185264) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_add_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_add_out.lora_dropout, 140533115629296) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_add_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_add_out.lora_dropout['default_0'], 140533115628768) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | 
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_add_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.to_add_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].attn.to_add_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].attn.to_add_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.to_add_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].attn.to_add_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_add_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.to_add_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[18].attn.to_add_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.to_add_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.to_add_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].attn.to_add_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_k, accessed_by=DictGetItemGuardAccessor(norm_added_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.norm_added_k, 140581770185408) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[18].attn.norm_added_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].attn.norm_added_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.norm_added_k.training, 140591004393440) # if attn.norm_added_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1751 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].attn.norm_added_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.norm_added_k.weight, 140581773242944) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_q, accessed_by=DictGetItemGuardAccessor(norm_added_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.norm_added_q, 140581770185312) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].attn.norm_added_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.norm_added_q.training, 140591004393440) # if attn.norm_added_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1749 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].attn.norm_added_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.norm_added_q.weight, 140581773236704) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
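
The norm_added_k / norm_added_q guards above pin eps == 1e-06 and the weight parameter of the QK RMSNorm layers. For reference, a minimal sketch of the forward those guard comments quote (diffusers normalization.py:428/430); the variance computation is assumed from the standard RMSNorm formulation and is not itself quoted in this trace:

    import torch

    def rms_norm(hidden_states: torch.Tensor, weight=None, eps: float = 1e-6) -> torch.Tensor:
        # Assumed: mean-square over the last dim (only the rsqrt line is quoted).
        input_dtype = hidden_states.dtype
        variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + eps)   # normalization.py:428
        if weight is not None:                                        # normalization.py:430
            hidden_states = hidden_states.to(weight.dtype) * weight
        return hidden_states.to(input_dtype)

The EQUALS_MATCH on eps means a checkpoint configured with a different eps would fail this guard and recompile rather than silently reuse the cached graph.
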
[__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.norm_added_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].attn.processor, 140581770184544) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1, accessed_by=DictGetItemGuardAccessor(norm1) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1, 140581770184112) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[18].norm1.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].norm1.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.training, 140591004393440) # norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:165 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.norm, 140581770184256) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.silu, 140581770184160) # emb = 
self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.linear, 140533115172416) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].norm1.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.linear.lora_A, 140533114728944) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 
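
The norm1 guards above walk an AdaLayerNormZero-style module: SiLU then Linear on the conditioning embedding, followed by a modulated LayerNorm. A condensed sketch assembled from the quoted lines (normalization.py:135/137/139); the split into six modulation tensors is assumed from the five-value unpacking at transformer_flux.py:165 and is not itself quoted:

    import torch.nn as nn

    class AdaLayerNormZeroSketch(nn.Module):
        def __init__(self, dim: int):
            super().__init__()
            self.silu = nn.SiLU()
            self.linear = nn.Linear(dim, 6 * dim)   # LoRA-wrapped in the guarded model
            self.norm = nn.LayerNorm(dim, elementwise_affine=False)

        def forward(self, x, emb):
            emb = self.linear(self.silu(emb))                                 # normalization.py:137
            # Assumed split; only the modulation expression below is quoted.
            shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = emb.chunk(6, dim=1)
            x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # normalization.py:139
            return x, gate_msa, shift_mlp, scale_mlp, gate_mlp
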
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.linear.lora_A['default_0'], 140533114723328) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.linear.lora_A['default_0'].weight, 140533119434592) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.linear.lora_B, 140533114728656) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[18].norm1.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.linear.lora_B['default_0'], 140533114728752) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.linear.base_layer, 140581770184208) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | 
| | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.linear.lora_dropout, 140533114726832) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.linear.lora_dropout['default_0'], 140533114727168) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].norm1.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].norm1.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in 
forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].norm1.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].norm1.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].norm1.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].norm1.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[18].norm1.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 
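
Every LoRA-wrapped Linear in this tree carries the same guard block: lora_A / lora_B ModuleDicts keyed by 'default_0', scaling['default_0'] == 1.0, use_dora['default_0'] False, merged_adapters empty, and _disable_adapters False. Together they pin a single branch of the peft LoRA forward. A condensed sketch reconstructed from the quoted lines (peft/tuners/lora/layer.py:557-568); the final update expression and the dtype bookkeeping are assumed from the standard LoRA formulation, since the trace quotes only the lookups and branch conditions:

    def lora_linear_forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)       # layer.py:557
        result_dtype = result.dtype                        # assumed bookkeeping
        for active_adapter in self.active_adapters:        # tuners_utils.py:516
            if active_adapter not in self.lora_A.keys():   # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]           # layer.py:562
            lora_B = self.lora_B[active_adapter]           # layer.py:563
            dropout = self.lora_dropout[active_adapter]    # layer.py:564
            scaling = self.scaling[active_adapter]         # layer.py:565
            x = x.to(lora_A.weight.dtype)                  # layer.py:566
            if not self.use_dora[active_adapter]:          # layer.py:568
                # Assumed: the plain LoRA update on this (guarded) branch.
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result.to(result_dtype)

Because each of these attribute reads is protected by an ID_MATCH / TYPE_MATCH / EQUALS_MATCH, changing the adapter set, merging weights, or altering the scaling after compilation will fail the guards, so Dynamo recompiles instead of reusing this cache entry.
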
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].norm1.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm2, accessed_by=DictGetItemGuardAccessor(norm2) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm2, 140581770185456) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm2.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | 
| +- GuardManager: source=L['self'].transformer_blocks[18].norm2.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm2.training, 140591004393440) # norm_hidden_states = self.norm2(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:182 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context, accessed_by=DictGetItemGuardAccessor(ff_context) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context, 140581770185792) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].ff_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.training, 140591004393440) # context_ff_output = self.ff_context(norm_encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:198 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net, accessed_by=DictGetItemGuardAccessor(net) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net, 140581770185936) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].ff_context.net, 93831537618768) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- LENGTH_CHECK: len(L['self'].transformer_blocks[18].ff_context.net) == 3 # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] 
| | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net.training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0], 140581770185888) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].ff_context.net[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj, accessed_by=DictGetItemGuardAccessor(proj) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].proj, 140533115299984) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- DICT_CONTAINS: not 
___dict_contains('forward', L['self'].transformer_blocks[18].ff_context.net[0].proj.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].proj.training, 140591004393408) # hidden_states = self.proj(hidden_states) # diffusers/src/diffusers/models/activations.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_A, 140533115298400) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_A['default_0'], 140533115298016) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_A['default_0'].weight, 140526684486080) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_B, 140533115302816) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_B['default_0'], 140533115294656) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].proj.base_layer, 140581770185984) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].proj.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_dropout, 140533115303776) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_dropout['default_0'], 140533115300128) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].proj.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].ff_context.net[0].proj.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].ff_context.net[0].proj.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].ff_context.net[0].proj.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | 
| | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].ff_context.net[0].proj.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].ff_context.net[0].proj.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].proj.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].ff_context.net[0].proj.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[18].ff_context.net[0].proj.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[0].proj._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[18].ff_context.net[0].proj._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].proj._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].ff_context.net[0].proj._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0].approximate, accessed_by=DictGetItemGuardAccessor(approximate) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].ff_context.net[0].approximate == 'tanh' # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[1], accessed_by=GetItemGuardAccessor(1) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[1], 140581770186080) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[1].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 
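
The ff_context guards describe a three-module FeedForward (LENGTH_CHECK len(net) == 3): a GELU projection with approximate == 'tanh', a middle module guarded only for its training flag (presumably dropout), and a LoRA-wrapped output Linear. A sketch of the first module, assembled from the quoted lines (activations.py:83/88); the constructor shape is an assumption:

    import torch.nn as nn
    import torch.nn.functional as F

    class GELUProj(nn.Module):
        def __init__(self, dim_in: int, dim_out: int, approximate: str = "tanh"):
            super().__init__()
            self.proj = nn.Linear(dim_in, dim_out)   # LoRA-wrapped in the guarded model
            self.approximate = approximate

        def forward(self, hidden_states):
            hidden_states = self.proj(hidden_states)                     # activations.py:88
            return F.gelu(hidden_states, approximate=self.approximate)   # activations.py:83

The enclosing FeedForward simply chains the three guarded submodules, per the quoted loop at attention.py:1200: for module in self.net: hidden_states = module(hidden_states).
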
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[1].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[1].training, 140591004393440) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2], accessed_by=GetItemGuardAccessor(2) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[2], 140533115932096) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].ff_context.net[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[2].training, 140591004393408) # for module in self.net: # diffusers/src/diffusers/models/attention.py:1200 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[2].lora_A, 140533115944672) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[2].lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[2].lora_A['default_0'], 140533115931616) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[2].lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[2].lora_A['default_0'].weight, 140526684480320) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[2].lora_B, 140533115943712) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].transformer_blocks[18].ff_context.net[2].lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[2].lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[2].lora_B['default_0'], 140533115941264) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[2].lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[2].base_layer, 140581770186128) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[2].base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) 
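
The guard subtrees above keep citing the same few source lines (peft/tuners/lora/layer.py:557-568), so it helps to spell out the forward path being specialized. The sketch below is a paraphrase reconstructed from the source comments embedded in the guards themselves; it is not the verbatim PEFT implementation, and the loop over self.active_adapters and the final "result = result + ..." update are assumptions filled in for readability:

    # Paraphrase of the LoRA forward these guards specialize, keyed to the
    # peft/tuners/lora/layer.py line numbers quoted in the guard comments.
    def forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)      # layer.py:557 -> ID_MATCH on base_layer
        for active_adapter in self.active_adapters:       # assumed loop; _active_adapter is TENSOR_ALIASING-guarded
            if active_adapter not in self.lora_A.keys():  # layer.py:560 -> ID_MATCH on the lora_A ModuleDict
                continue
            lora_A = self.lora_A[active_adapter]          # layer.py:562 -> ID_MATCH on lora_A['default_0']
            lora_B = self.lora_B[active_adapter]          # layer.py:563 -> ID_MATCH on lora_B['default_0']
            dropout = self.lora_dropout[active_adapter]   # layer.py:564 -> ID_MATCH on the dropout module
            scaling = self.scaling[active_adapter]        # layer.py:565 -> TYPE_MATCH/DICT_LENGTH, EQUALS_MATCH == 1.0
            x = x.to(lora_A.weight.dtype)                 # layer.py:566 -> ID_MATCH on lora_A weight
            if not self.use_dora[active_adapter]:         # layer.py:568 -> ID_MATCH on use_dora['default_0']
                result = result + lora_B(lora_A(dropout(x))) * scaling  # assumed update
        return result

Every attribute this code touches becomes a Dynamo guard: dictionary identity (ID_MATCH), dictionary type and length (TYPE_MATCH, DICT_LENGTH), the scaling value (EQUALS_MATCH == 1.0), the merged_adapters and _disable_adapters flags, and the _active_adapter list aliased back to transformer_blocks[0].norm1.linear. The same subtree therefore repeats once per LoRA-wrapped Linear in the model, which is what makes this dump so long.
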
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[2].lora_dropout, 140533115930656) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[2].lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[2].lora_dropout['default_0'], 140533115929552) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[2].lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].ff_context.net[2].scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].ff_context.net[2].scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].ff_context.net[2].scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].ff_context.net[2].use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].ff_context.net[2].use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[2].use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2].merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].ff_context.net[2].merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[18].ff_context.net[2].merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2]._disable_adapters, 
accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].ff_context.net[2]._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context.net[2]._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].ff_context.net[2]._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].ff_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context, accessed_by=DictGetItemGuardAccessor(norm1_context) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context, 140581770184304) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | |
+- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].norm1_context.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.training, 140591004393440) # norm_encoder_hidden_states, c_gate_msa, c_shift_mlp, c_scale_mlp, c_gate_mlp = self.norm1_context( # diffusers/src/diffusers/models/transformers/transformer_flux.py:167 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.emb, accessed_by=DictGetItemGuardAccessor(emb) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.emb, 140591004478624) # if self.emb is not None: # diffusers/src/diffusers/models/normalization.py:135 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.norm, 140581770184496) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:139 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | 
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.silu, 140581770184400) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.linear, 140533114722608) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].transformer_blocks[18].norm1_context.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:137 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.linear.lora_A, 140533114728464) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.linear.lora_A['default_0'], 140533114723520) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.linear.lora_A['default_0'].weight, 140526556139568) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].transformer_blocks[18].norm1_context.linear.lora_B, 140533114728128) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.linear.lora_B['default_0'], 140533114722560) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.linear.base_layer, 140581770184448) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.linear.lora_dropout, 140533114719536) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.linear.lora_dropout['default_0'], 140533114721840) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].norm1_context.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].norm1_context.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].transformer_blocks[18].norm1_context.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].norm1_context.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].transformer_blocks[18].norm1_context.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].transformer_blocks[18].norm1_context.linear.merged_adapters, 140591004458752) # 
return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].transformer_blocks[18].norm1_context.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm1_context.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].transformer_blocks[18].norm1_context.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm1_context._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager:
source=L['self'].transformer_blocks[18].norm2_context, accessed_by=DictGetItemGuardAccessor(norm2_context) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm2_context, 140581770185504) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm2_context.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18].norm2_context.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].transformer_blocks[18].norm2_context.training, 140591004393440) # norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:195 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].transformer_blocks[18]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- GuardManager: source=L['self'].single_transformer_blocks, accessed_by=DictGetItemGuardAccessor(single_transformer_blocks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks, 140581770185840) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks.training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0], 140581770183776) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[0].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn, 140581770186656) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[0].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- 
GuardManager: source=L['self'].single_transformer_blocks[0].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_k, 140533115824656) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[0].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_k.lora_A, 140533115824512) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[0].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_k.lora_A['default_0'], 140533115817792) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_k.lora_A['default_0'].weight, 140533122754384) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_k.lora_B, 140533115825184) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_k.lora_B['default_0'], 140533115824800) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_k.base_layer, 140581770186800) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_k.lora_dropout, 140533115818656) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.lora_dropout.__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_k.lora_dropout['default_0'], 140533115823312) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[0].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[0].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[0].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | 
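
The guards above walk every attribute that PEFT's LoRA Linear touches on its forward path: ID_MATCH pins the exact base_layer, the lora_A/lora_B ModuleDicts, their 'default_0' entries and weights, and EQUALS_MATCH freezes the scaling value. Below is a sketch of that forward path, reconstructed from the peft/tuners/lora/layer.py lines the guard comments cite (557-568). It is simplified (the DoRA branch, the adapter-disabled path, and result-dtype bookkeeping are omitted), and `self` stands for the LoRA-wrapped to_k Linear:

    def lora_linear_forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)      # layer.py:557 (ID_MATCH on base_layer)
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():  # layer.py:560 (ID_MATCH on lora_A)
                continue
            lora_A = self.lora_A[active_adapter]          # layer.py:562
            lora_B = self.lora_B[active_adapter]          # layer.py:563
            dropout = self.lora_dropout[active_adapter]   # layer.py:564
            scaling = self.scaling[active_adapter]        # layer.py:565 (EQUALS_MATCH == 1.0)
            x = x.to(lora_A.weight.dtype)                 # layer.py:566 (ID_MATCH on lora_A.weight)
            if not self.use_dora[active_adapter]:         # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Because Dynamo specializes on the exact module objects along this path, hot-swapping the 'default_0' adapter (or any of its submodules) invalidates this cache entry.
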
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[0].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[0].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[0].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[0].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[0].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[0].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q, 140533115819376) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[0].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q.lora_A, 140533115829696) # 
if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q.lora_A['default_0'], 140533115818848) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q.lora_A['default_0'].weight, 140533122750304) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q.lora_B, 140533115818032) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q.lora_B['default_0'], 140533115817552) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q.base_layer, 140581770186896) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] 
[0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q.lora_dropout, 140533115825280) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q.lora_dropout['default_0'], 140533115828688) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: 
___check_type_id(L['self'].single_transformer_blocks[0].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[0].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[0].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[0].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[0].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[0].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[0].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[0].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[0].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_v, 140533115818128) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[0].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
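
to_q repeats the same pattern as to_k, including the TYPE_MATCH plus LENGTH_CHECK on merged_adapters: the graph is only valid while no adapter has been merged into the base weights. If the adapters will not change between calls, fusing them before compilation removes the whole PEFT branch from the traced code, and with it most of these guards. A sketch under stated assumptions (a diffusers pipeline `pipe` with this LoRA already loaded; fuse_lora and unload_lora_weights are the standard diffusers LoRA entry points):

    import torch

    # assumes: pipe = FluxPipeline.from_pretrained(...) with the LoRA loaded
    pipe.fuse_lora()              # fold lora_B @ lora_A * scaling into the base weights
    pipe.unload_lora_weights()    # drop the now-redundant adapter modules
    pipe.transformer = torch.compile(pipe.transformer)
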
source=L['self'].single_transformer_blocks[0].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_v.lora_A, 140533114801312) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_v.lora_A['default_0'], 140533114801696) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_v.lora_A['default_0'].weight, 140533112264544) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_v.lora_B, 140533114805152) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_v.lora_B['default_0'], 140533114803952) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 
in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_v.base_layer, 140581770186944) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_v.lora_dropout, 140533115817216) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_v.lora_dropout['default_0'], 140533115829744) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[0].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[0].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[0].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[0].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[0].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[0].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[0].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[0].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[0].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[0].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_k, 
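
The TENSOR_ALIASING entries (printed twice per pair in this dump) assert object identity between two attribute paths: every LoRA layer here shares the single _active_adapter list first seen on transformer_blocks[0].norm1.linear. A sketch of what that guard reduces to:

    def aliasing_holds(model):
        # TENSOR_ALIASING is an `is` check between the two guarded paths.
        return (
            model.transformer_blocks[0].norm1.linear._active_adapter
            is model.single_transformer_blocks[0].attn.to_v._active_adapter
        )
    # Re-assigning _active_adapter on one layer to a fresh (even equal) list
    # breaks the alias and invalidates this compiled entry.
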
accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.norm_k, 140581770186848) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[0].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[0].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.norm_k.weight, 140581765130624) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[0].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.norm_q, 140581770186704) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[0].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[0].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.norm_q.weight, 140581765886208) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | 
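
For norm_k and norm_q the guards pin scalar config rather than just module identity: EQUALS_MATCH freezes eps at 1e-06 and ID_MATCH pins the weight parameter object. A simplified reconstruction of the RMSNorm forward these guards protect, based on the diffusers/src/diffusers/models/normalization.py lines the log cites (428 and 430); the dtype bookkeeping is condensed:

    import torch

    def rms_norm(hidden_states, weight, eps=1e-6):
        variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + eps)  # normalization.py:428
        if weight is not None:                                       # normalization.py:430
            hidden_states = hidden_states.to(weight.dtype) * weight
        return hidden_states
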
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[0].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[0].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.processor, 140581770186608) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.forward, 
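
attn.heads is specialized as a literal (EQUALS_MATCH == 24), so head_dim becomes a compile-time constant, and the processor is pinned by both TYPE_MATCH and ID_MATCH, meaning attn.set_processor(...) after compilation forces a recompile. The arithmetic behind the cited attention_processor.py:1721, with inner_dim = 3072 assumed for this Flux checkpoint (the value itself does not appear in the log):

    heads = 24                     # guarded: EQUALS_MATCH == 24
    inner_dim = 3072               # assumption for this checkpoint
    head_dim = inner_dim // heads  # == 128, baked into the compiled graph
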
accessed_by=GetAttrGuardAccessor(forward) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm, 140581770186224) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[0].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.norm, 140581770186368) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[0].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.silu, 140581770186272) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.linear, 140533115933776) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[0].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[0].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.linear.lora_A, 140533115819760) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.linear.lora_A['default_0'], 140533115822976) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 
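
The guard kinds repeated throughout this tree reduce to cheap Python-level predicates that Dynamo evaluates before reusing a compiled graph. The sketch below is a reading aid, not Dynamo's actual implementation (the real checks run through the helpers named in the log, e.g. `___check_obj_id`). Note that the two object ids that keep appearing for `.training`, 140591004393440 and 140591004393408, are plausibly just `id(True)` and `id(False)`, so those ID_MATCH guards pin each submodule's train/eval mode.

```python
# Rough Python equivalents of the guard kinds above -- a sketch for reading
# the log, not Dynamo's real guard machinery.

def id_match(obj, expected_id):            # ID_MATCH / ___check_obj_id
    return id(obj) == expected_id          # same object, by identity

def type_match(obj, expected_type_id):     # TYPE_MATCH / ___check_type_id
    return id(type(obj)) == expected_type_id

def equals_match(value, expected):         # EQUALS_MATCH, e.g. norm_q.eps == 1e-06
    return value == expected

def dict_length(d, n):                     # DICT_LENGTH, e.g. len(scaling) == 1
    return len(d) == n

def dict_contains(d, key, expected=True):  # DICT_CONTAINS (negated in the log)
    return (key in d) is expected

def length_check_falsy(seq):               # LENGTH_CHECK: not merged_adapters
    return not seq

def tensor_aliasing(a, b):                 # TENSOR_ALIASING: two sources, one object
    return a is b
```
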
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.linear.lora_A['default_0'].weight, 140526554348048) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.linear.lora_B, 140533115820000) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.linear.lora_B['default_0'], 140533115815536) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[0].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.linear.base_layer, 140581770186320) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.linear.lora_dropout, 140533115932384) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.linear.lora_dropout['default_0'], 140533115934208) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 
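
The `single_transformer_blocks[0].norm` subtree above walks diffusers' AdaLayerNormZeroSingle, whose quoted source lines (normalization.py:169 and :171) give the modulation path: a SiLU + Linear over the conditioning embedding produces shift/scale/gate, which then modulate a parameter-free LayerNorm. A minimal sketch; only lines 169 and 171 are quoted in the guards, so the 3-way chunk is assumed from context. In the guarded model the Linear is additionally LoRA-wrapped, which is why the lora_A/lora_B guards above hang off `norm.linear`.

```python
import torch
import torch.nn as nn

class AdaLayerNormZeroSingleSketch(nn.Module):
    # Minimal sketch of the modulation path the norm.* guards above trace
    # (normalization.py:169/171); the 3-way chunk is assumed from context.
    def __init__(self, embedding_dim: int):
        super().__init__()
        self.silu = nn.SiLU()
        self.linear = nn.Linear(embedding_dim, 3 * embedding_dim)
        self.norm = nn.LayerNorm(embedding_dim, elementwise_affine=False, eps=1e-6)

    def forward(self, x: torch.Tensor, emb: torch.Tensor):
        emb = self.linear(self.silu(emb))                     # normalization.py:169
        shift_msa, scale_msa, gate_msa = emb.chunk(3, dim=1)  # assumed split
        x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # :171
        return x, gate_msa
```
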
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[0].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[0].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[0].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[0].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[0].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[0].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[0].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[0].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[0].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[0].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 
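
Most of the guards on `norm.linear` trace peft's lora.Linear forward line by line (peft/tuners/lora/layer.py:557-568 in the comments). Reassembling those quoted lines, the guarded fast path (adapters enabled, nothing merged, `use_dora` False, `scaling` 1.0) looks roughly like the sketch below; `layer` stands in for the wrapped module with its `base_layer`, `lora_A`/`lora_B` ModuleDicts, and bookkeeping dicts.

```python
import torch

def lora_linear_forward_sketch(layer, x: torch.Tensor) -> torch.Tensor:
    # Sketch of the peft lora.Linear path pinned by the guards above
    # (peft/tuners/lora/layer.py:557-568).
    result = layer.base_layer(x)                       # layer.py:557
    for active_adapter in layer.active_adapters:       # guarded: ['default_0']
        if active_adapter not in layer.lora_A.keys():  # layer.py:560
            continue
        lora_A = layer.lora_A[active_adapter]          # layer.py:562
        lora_B = layer.lora_B[active_adapter]          # layer.py:563
        dropout = layer.lora_dropout[active_adapter]   # layer.py:564
        scaling = layer.scaling[active_adapter]        # layer.py:565, == 1.0 here
        x = x.to(lora_A.weight.dtype)                  # layer.py:566
        if not layer.use_dora[active_adapter]:         # layer.py:568, taken branch
            result = result + lora_B(lora_A(dropout(x))) * scaling
    return result
```
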
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].act_mlp, 140581770186512) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_mlp, 140533115821440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[0].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_mlp.lora_A, 140533115815968) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_mlp.lora_A['default_0'], 140533115818944) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_mlp.lora_A['default_0'].weight, 140526554348688) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_mlp.lora_B, 140533115819616) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_mlp.lora_B['default_0'], 140533115819136) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] 
[0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_mlp.base_layer, 140581770186464) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_mlp.lora_dropout, 140533115823168) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_mlp.lora_dropout['default_0'], 140533115822784) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[0].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[0].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[0].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[0].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[0].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[0].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[0].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[0].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[0].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_out, 140533115830224) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor 
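
The transformer_flux.py line numbers quoted across this block's guards (88, 89, 91, 98) outline the single-block forward. Below is a sketch assembled from those quotes; the residual/concat wiring and the gate broadcast between lines 91 and 98 are assumptions, since they are not quoted in the guards.

```python
import torch

def flux_single_block_forward_sketch(block, hidden_states, temb, image_rotary_emb=None):
    # Assembled from the source lines quoted in the guards above
    # (transformer_flux.py:88, 89, 91, 98).
    residual = hidden_states
    norm_hidden_states, gate = block.norm(hidden_states, emb=temb)            # :88
    mlp_hidden_states = block.act_mlp(block.proj_mlp(norm_hidden_states))     # :89
    attn_output = block.attn(                                                 # :91
        hidden_states=norm_hidden_states,
        image_rotary_emb=image_rotary_emb,
    )
    hidden_states = torch.cat([attn_output, mlp_hidden_states], dim=2)  # assumed
    hidden_states = gate.unsqueeze(1) * block.proj_out(hidden_states)         # :98
    return residual + hidden_states
```
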
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[0].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_out.lora_A, 140533115827776) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_out.lora_A['default_0'], 140533115817168) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.lora_A['default_0'].training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_out.lora_A['default_0'].weight, 140526554348448) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_out.lora_B, 140533115823984) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_out.lora_B['default_0'], 140533115820144) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.lora_B['default_0'].training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_out.base_layer, 140581770186560) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_out.lora_dropout, 140533115827200) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[0].proj_out.lora_dropout['default_0'], 140533115820720) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[0].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[0].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[0].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[0].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[0].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[0].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[0].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[0].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[0].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[0].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[0]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
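
All of the guards above trace a single PEFT code path: every Python-level attribute that peft's LoRA Linear.forward reads (base_layer, lora_A, lora_B, lora_dropout, scaling, use_dora, plus the merged / disable_adapters / active_adapter properties) gets pinned by its own ID_MATCH, TYPE_MATCH, DICT_LENGTH or EQUALS_MATCH guard, per module and per adapter. For reference, a paraphrased sketch of that forward, reconstructed from nothing but the source lines quoted in the guard comments (peft/tuners/lora/layer.py:557-568, peft/tuners/tuners_utils.py:506-516); this is a sketch, not the verbatim peft source:

    def forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)       # layer.py:557 -> base_layer ID_MATCH
        for active_adapter in self.active_adapters:        # tuners_utils.py:516 -> _active_adapter aliasing guards
            if active_adapter not in self.lora_A.keys():   # layer.py:560 -> lora_A ModuleDict guards
                continue
            lora_A = self.lora_A[active_adapter]           # layer.py:562
            lora_B = self.lora_B[active_adapter]           # layer.py:563
            dropout = self.lora_dropout[active_adapter]    # layer.py:564
            scaling = self.scaling[active_adapter]         # layer.py:565 -> EQUALS_MATCH on the float
            x = x.to(lora_A.weight.dtype)                  # layer.py:566 -> weight ID_MATCH
            if not self.use_dora[active_adapter]:          # layer.py:568 -> use_dora dict guards
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Multiplied over every to_q/to_k/to_v/proj LoRA module in all transformer_blocks and single_transformer_blocks, this per-attribute guarding is what makes the tree this large. The same pattern repeats verbatim for block 1 below.
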
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1], accessed_by=GetItemGuardAccessor(1)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1], 140581770186176) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[1].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn, 140581770187424) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[1].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_k, 140533114903552) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[1].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_k.lora_A, 140533114900672) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_k.lora_A['default_0'], 140533114865648) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_k.lora_A['default_0'].weight, 140537326795440) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_k.lora_B, 140533114898080) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_k.lora_B['default_0'], 140533113862272) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_k.base_layer, 140581770187568) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_k.lora_dropout, 140533114912576) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_k.lora_dropout['default_0'], 140533114906192) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[1].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[1].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[1].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[1].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[1].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[1].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[1].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[1].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[1].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
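
The EQUALS_MATCH guards deserve attention: Dynamo has specialized on the Python float scaling['default_0'] == 1.0, so changing the adapter scale later (for example via set_adapters() or a lora_scale argument) fails that guard on every LoRA module at once and forces a full recompile of the frame, which matches the repeated recompiles this log shows. A self-contained toy, separate from this repro, that demonstrates the same specialization on the PyTorch build that produced this log:

    # Why EQUALS_MATCH on a Python float causes recompiles: Dynamo bakes the
    # value it saw at trace time into a guard, so mutating it invalidates
    # the compiled graph.
    import torch

    class Toy(torch.nn.Module):
        def __init__(self):
            super().__init__()
            self.scaling = {"default_0": 1.0}   # mirrors peft's per-adapter dict
            self.linear = torch.nn.Linear(8, 8)

        def forward(self, x):
            return self.linear(x) * self.scaling["default_0"]

    m = Toy()
    compiled = torch.compile(m)
    x = torch.randn(2, 8)
    compiled(x)                     # first compile; guards scaling == 1.0
    m.scaling["default_0"] = 0.5    # EQUALS_MATCH guard now fails ...
    compiled(x)                     # ... so this call triggers a recompile

The to_q projection below carries the identical set of guards.
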
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_q, 140533116315408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[1].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_q.lora_A, 140533114906480) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_q.lora_A['default_0'], 140533114911376) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_q.lora_A['default_0'].weight, 140533135597520) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_q.lora_B, 140533114906432) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_q.lora_B['default_0'], 140533114906144) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_q.base_layer, 140581770187664) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_q.lora_dropout, 140533114905760) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_q.lora_dropout['default_0'], 140533116313824) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[1].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[1].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[1].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[1].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[1].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[1].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[1].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[1].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[1].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
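
The to_v projection and the attention norms follow below with the identical guard pattern. For anyone trying to reproduce a dump like this one, PyTorch's structured logging API can be enabled as follows (a sketch assuming a PyTorch 2.x build where these logging flags exist):

    import torch
    # Print guard trees and recompile reasons for every compiled frame.
    torch._logging.set_logs(guards=True, recompiles=True)
    # Equivalently from the shell: TORCH_LOGS="guards,recompiles" python repro.py

When the adapters do not need to stay switchable at runtime, a commonly suggested mitigation is to merge or fuse the LoRA weights into the base layers before calling torch.compile (for example diffusers' fuse_lora() or peft's merge_and_unload(), where the installed versions provide them), so that none of the per-adapter dict lookups guarded above are traced at all.
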
accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_v, 140533113858960) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[1].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_v.lora_A, 140533113853776) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_v.lora_A['default_0'], 140533113860928) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_v.lora_A['default_0'].weight, 140537326789520) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_v.lora_B, 140533113848928) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_v.lora_B['default_0'], 140533113861984) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_v.base_layer, 140581770187712) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_v.lora_dropout, 140533113861504) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.lora_dropout.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_v.lora_dropout['default_0'], 140533113861024) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[1].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[1].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[1].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | 
| | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[1].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[1].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[1].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[1].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 
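
The to_v subtree above pins every input that could change the branch taken inside peft's LoRA Linear forward: a single adapter named 'default_0', scaling equal to 1.0, DoRA disabled, nothing merged, adapters enabled. A minimal sketch of that forward path, assembled from the source lines quoted in the guard comments (peft/tuners/lora/layer.py:557-568); the update line combining lora_B(lora_A(...)) is an assumption filled in from the standard LoRA formulation, not a line quoted in this log.

def lora_linear_forward(self, x, *args, **kwargs):
    result = self.base_layer(x, *args, **kwargs)      # layer.py:557, frozen base Linear
    for active_adapter in self.active_adapters:
        if active_adapter not in self.lora_A.keys():  # layer.py:560
            continue
        lora_A = self.lora_A[active_adapter]          # layer.py:562, guarded by ID_MATCH
        lora_B = self.lora_B[active_adapter]          # layer.py:563, guarded by ID_MATCH
        dropout = self.lora_dropout[active_adapter]   # layer.py:564, module identity + .training guarded
        scaling = self.scaling[active_adapter]        # layer.py:565, EQUALS_MATCH pins 1.0
        x = x.to(lora_A.weight.dtype)                 # layer.py:566
        if not self.use_dora[active_adapter]:         # layer.py:568, guarded False
            # Assumed plain-LoRA update; the DoRA branch is never traced here.
            result = result + lora_B(lora_A(dropout(x))) * scaling
    return result

Because scaling is guarded with EQUALS_MATCH rather than treated as a dynamic value, changing the LoRA scale at runtime fails this guard set and forces a recompile.
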
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[1].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.norm_k, 140581770187616) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[1].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[1].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager:
source=L['self'].single_transformer_blocks[1].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.norm_k.weight, 140581765129024) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.norm_q, 140581770187472) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[1].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: 
L['self'].single_transformer_blocks[1].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.norm_q.weight, 140581783344112) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[1].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[1].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.processor, 140581770187376) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | 
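
The norm_q / norm_k subtrees above describe the same RMSNorm module type: eps is recorded as a plain float (EQUALS_MATCH == 1e-06) and the optional weight by object identity. A sketch reconstructed around the two lines quoted from diffusers/src/diffusers/models/normalization.py (428 and 430); the variance computation and the final weight multiply are assumptions based on the standard RMSNorm formulation.

import torch
import torch.nn as nn

class RMSNormSketch(nn.Module):
    def __init__(self, dim, eps=1e-6, elementwise_affine=True):
        super().__init__()
        self.eps = eps  # baked into the guard, so a different eps forces a recompile
        self.weight = nn.Parameter(torch.ones(dim)) if elementwise_affine else None

    def forward(self, hidden_states):
        # Assumed: mean of squares over the last dimension.
        variance = hidden_states.pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + self.eps)  # normalization.py:428
        if self.weight is not None:                                       # normalization.py:430
            hidden_states = hidden_states * self.weight
        return hidden_states
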
| | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm, 140581770187040) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[1].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.training, 140591004393440) # norm_hidden_states, gate = 
self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.norm, 140581770187184) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.silu, 140581770187088) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.linear, 140533114803856) # emb = self.linear(self.silu(emb)) # 
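
The norm submodule guarded above is the silu -> linear -> modulated-LayerNorm pattern behind the quoted normalization.py lines 169 and 171, whose (x, gate) output feeds transformer_flux.py:88. A sketch under the assumption that the elided line 170 chunks the projected embedding three ways (shift, scale, gate), which the [:, None] broadcasting on line 171 and the returned gate suggest; in this log the inner linear is additionally wrapped by a peft lora.Linear, hence the lora_A/lora_B guards that follow.

import torch.nn as nn

class AdaNormZeroSingleSketch(nn.Module):
    def __init__(self, embedding_dim):
        super().__init__()
        self.silu = nn.SiLU()
        self.linear = nn.Linear(embedding_dim, 3 * embedding_dim)
        self.norm = nn.LayerNorm(embedding_dim, elementwise_affine=False, eps=1e-6)

    def forward(self, x, emb):
        emb = self.linear(self.silu(emb))                                 # normalization.py:169
        shift_msa, scale_msa, gate_msa = emb.chunk(3, dim=1)              # assumed line 170
        x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # normalization.py:171
        return x, gate_msa
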
diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[1].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.linear.lora_A, 140533114798384) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.linear.lora_A['default_0'], 140533116312816) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[1].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.linear.lora_A['default_0'].weight, 140533112271584) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.linear.lora_B, 140533116320160) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.linear.lora_B['default_0'], 140533116313536) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.linear.base_layer, 140581770187136) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.linear.lora_dropout, 140533114810576) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.linear.lora_dropout.training, 
140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.linear.lora_dropout['default_0'], 140533114803904) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[1].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[1].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[1].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[1].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[1].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[1].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[1].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[1].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].act_mlp, 140581770187280) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_mlp, 140533116313296) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) #
diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[1].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_mlp.lora_A, 140533116316608) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_mlp.lora_A['default_0'], 140533116318672) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[1].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_mlp.lora_A['default_0'].weight, 140533112261024) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_mlp.lora_B, 140533116313632) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_mlp.lora_B['default_0'], 140533116314880) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[1].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_mlp.base_layer, 140581770187232) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_mlp.lora_dropout, 140533116319248) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- 
GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_mlp.lora_dropout['default_0'], 140533116315360) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[1].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[1].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[1].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[1].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[1].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | 
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[1].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[1].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[1].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- 
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_out, 140533116311760) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[1].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_out.lora_A, 140533116321696) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
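
The proj_out guards cite the tail of the Flux single transformer block, where the block output is gated and projected (transformer_flux.py:91 and :98 in the quoted comments). A runnable stand-in with made-up shapes; the real block wires the gate from its AdaLN modulation:

    import torch
    proj_out = torch.nn.Linear(64, 64)              # hypothetical dims
    hidden_states = torch.randn(2, 16, 64)
    gate = torch.randn(2, 1, 1)                     # per-sample gate, broadcast over tokens
    hidden_states = gate * proj_out(hidden_states)  # transformer_flux.py:98 in the log
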
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_out.lora_A['default_0'], 140533116321744) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_out.lora_A['default_0'].weight, 140533135592080) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_out.lora_B, 140533116306624) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_out.lora_B['default_0'], 140533116319200) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_out.base_layer, 140581770187328) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_out.lora_dropout, 140533116313584) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
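
ID_MATCH guards like the ones above on lora_A['default_0'].weight reduce to a CPython id() comparison, so they pass as long as the same objects stay attached to the module tree: in-place weight updates keep the guard valid, while rebinding a new Parameter would force a recompile. Illustrative only; ___check_obj_id itself is Dynamo-internal:

    import torch
    def check_obj_id(obj, expected_id):        # stand-in for ___check_obj_id
        return id(obj) == expected_id
    w = torch.nn.Parameter(torch.zeros(4, 4))
    saved = id(w)
    w.data.fill_(1.0)                          # in-place update: same object
    assert check_obj_id(w, saved)              # guard still passes
    w = torch.nn.Parameter(torch.ones(4, 4))   # new object: guard would now fail
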
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_out.lora_dropout['default_0'], 140533116318624) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[1].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[1].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[1].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[1].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[1].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[1].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[1].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[1].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
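
The EQUALS_MATCH on scaling['default_0'] == 1.0 means the LoRA scale was specialized as a Python float constant: changing lora_scale after compiling invalidates every one of these guards and triggers a recompile, which fits the [0/3] tag on these records marking a later compilation of the same frame. A minimal repro sketch with a hypothetical module, not the real PEFT class:

    import torch

    class Scaled(torch.nn.Module):
        def __init__(self):
            super().__init__()
            self.scaling = {"default_0": 1.0}     # mirrors the guarded dict
        def forward(self, x):
            return x * self.scaling["default_0"]  # float read -> EQUALS_MATCH guard

    mod = Scaled()
    opt = torch.compile(mod)
    x = torch.randn(8)
    opt(x)                                        # compiles, guards scaling == 1.0
    mod.scaling["default_0"] = 0.5                # guard now fails...
    opt(x)                                        # ...so this call recompiles
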
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[1].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[1]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2], accessed_by=GetItemGuardAccessor(2)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2], 140581770186992) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[2].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
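
The ID_MATCH on single_transformer_blocks[2] cites the enumerate loop at transformer_flux.py:509: Dynamo unrolls the Python loop at trace time, so every element of the ModuleList is guarded individually, and the same subtree repeats for blocks [0], [1], [2], and so on. A small illustration of the unrolling on a hypothetical two-layer list:

    import torch
    blocks = torch.nn.ModuleList(torch.nn.Linear(8, 8) for _ in range(2))
    def run(x):
        for b in blocks:    # unrolled at trace time: blocks[0], blocks[1]
            x = b(x)        # each element gets its own ID_MATCH guard
        return x
    opt = torch.compile(run)
    opt(torch.randn(4, 8))
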
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn, 140581770188192) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[2].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_k, 140533114763632) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[2].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
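
The recurring DICT_CONTAINS guards come from nn.Module._call_impl, which dispatches through self.forward: Dynamo must verify that no per-instance 'forward' attribute has been added to a module's __dict__, since that would shadow the class method and change what gets called. Illustrative:

    import torch
    lin = torch.nn.Linear(2, 2)
    assert "forward" not in lin.__dict__   # the guarded condition
    lin.forward = lambda x: x * 0          # instance attr shadows the class method
    assert "forward" in lin.__dict__       # the guard would now fail -> recompile
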
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_k.lora_A, 140533113635600) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_k.lora_A['default_0'], 140533113651008) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_k.lora_A['default_0'].weight, 140526270706160) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_k.lora_B, 140533113645056) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_k.lora_B['default_0'], 140533113649136) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_k.base_layer, 140581770188336) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_k.lora_dropout, 140533113639296) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_k.lora_dropout['default_0'], 140533113645632) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[2].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[2].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[2].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[2].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[2].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[2].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[2].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[2].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_q, 140533113851280) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
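
to_q, to_k, and to_v mirror the three projections in the Flux attention processor (attention_processor.py:1716-1718 per the quoted comments). Since each projection is LoRA-wrapped, each repeats the full adapter subtree above, so guard count grows roughly as blocks x projections x adapter attributes. A plain stand-in for the three calls, with made-up dimensions:

    import torch
    dim = 64                                  # hypothetical
    to_q, to_k, to_v = (torch.nn.Linear(dim, dim) for _ in range(3))
    hidden_states = torch.randn(2, 16, dim)
    query = to_q(hidden_states)               # attention_processor.py:1716
    key = to_k(hidden_states)                 # attention_processor.py:1717
    value = to_v(hidden_states)               # attention_processor.py:1718
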
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[2].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_q.lora_A, 140533113860496) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_q.lora_A['default_0'], 140533113851376) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_q.lora_A['default_0'].weight, 140526668404336) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_q.lora_B, 140533113849696) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_q.lora_B['default_0'], 140533113851328) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_q.base_layer, 140581770188432) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_q.lora_dropout, 140533113850704) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_q.lora_dropout['default_0'], 140533113851808) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[2].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[2].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[2].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[2].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[2].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[2].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[2].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[2].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_v, 140533117473440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[2].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_v.lora_A, 140533117477280) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | 
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_v.lora_A['default_0'], 140533117469024) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_v.lora_A['default_0'].weight, 140526270697840) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_v.lora_B, 140533117481360) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.lora_B.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_v.lora_B['default_0'], 140533117478816) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_v.base_layer, 140581770188480) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_v.lora_dropout, 140533117476416) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_v.lora_dropout['default_0'], 140533117474880) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[2].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[2].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.scaling['default_0'], 
accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[2].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[2].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[2].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[2].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[2].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[2].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[2].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[2].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.norm_k, 140581770188384) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[2].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_k.eps, 
accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[2].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.norm_k.weight, 140581772721376) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.norm_q, 140581770188240) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[2].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_q.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[2].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.norm_q.weight, 140581765987072) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[2].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[2].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.processor, 140581770188144) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm, 140581770187808) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[2].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.norm, 140581770187952) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.silu, 140581770187856) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.linear, 140533113862032) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[2].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.linear.lora_A, 140533113861552) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # 
peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.linear.lora_A['default_0'], 140533113855552) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.linear.lora_A['default_0'].weight, 140526668403136) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.linear.lora_B, 140533113859296) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[2].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.linear.lora_B['default_0'], 140533113861072) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.linear.base_layer, 140581770187904) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.linear.lora_dropout, 140533113860112) # dropout = self.lora_dropout[active_adapter] # 
peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.linear.lora_dropout['default_0'], 140533113861408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[2].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[2].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | 
| | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[2].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[2].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[2].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[2].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[2].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[2].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[2].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].act_mlp, 140581770188048) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_mlp, 140533113854496) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[2].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_mlp.lora_A, 140533113852720) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_mlp.lora_A['default_0'], 140533113851136) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_mlp.lora_A['default_0'].weight, 140526668398736) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_mlp.lora_B, 140533113858288) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_mlp.lora_B['default_0'], 140533113853296) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_mlp.base_layer, 140581770188000) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_mlp.lora_dropout, 140533113852768) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_mlp.lora_dropout['default_0'], 140533113853200) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[2].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[2].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[2].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[2].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[2].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[2].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[2].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[2].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[2].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_out, 140533113852480) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[2].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_out.lora_A, 140533113850272) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_out.lora_A['default_0'], 140533113848832) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_out.lora_A['default_0'].weight, 140526668408576) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_out.lora_B, 140533113854544) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_out.lora_B['default_0'], 140533113850656) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_out.base_layer, 140581770188096) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_out.lora_dropout, 140533113850944) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_out.lora_dropout['default_0'], 140533113849744) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[2].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[2].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[2].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[2].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[2].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[2].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[2].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[2].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[2].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[2].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[2]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3], accessed_by=GetItemGuardAccessor(3)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3], 140581770187760) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[3].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn, 140581770188960) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[3].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_k, 140533115110400) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[3].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_k.lora_A, 140533115124944) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_k.lora_A['default_0'], 140533115123840) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_k.lora_A['default_0'].weight, 140526553670784) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_k.lora_B, 140533115124368) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_k.lora_B['default_0'], 140533115116016) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_k.base_layer, 140581770189104) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_k.lora_dropout, 140533115124176) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_k.lora_dropout['default_0'], 140533115115824) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[3].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[3].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[3].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[3].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[3].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[3].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[3].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[3].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[3].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_q, 140533115115392) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[3].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_q.lora_A, 140533115124128) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_q.lora_A['default_0'], 140533115111264) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_q.lora_A['default_0'].weight, 140533117610928) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_q.lora_B, 140533115118656) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_q.lora_B['default_0'], 140533115120384) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_q.base_layer, 140581770189200) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_q.lora_dropout,
140533115115872) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_q.lora_dropout['default_0'], 140533115124656) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[3].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[3].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[3].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[3].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[3].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[3].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[3].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 
in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[3].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[3].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_v, 140533115341888) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[3].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[3].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_v.lora_A, 140533115340880) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_v.lora_A['default_0'], 140533115345344) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_v.lora_A['default_0'].weight, 140526553673264) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_v.lora_B, 140533115344048) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_v.lora_B['default_0'], 140533115343712) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_v.base_layer, 140581770189248) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.base_layer.__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_v.lora_dropout, 140533115346064) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_v.lora_dropout['default_0'], 140533115344192) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[3].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[3].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[3].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[3].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[3].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[3].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[3].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[3].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[3].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.norm_k, 140581770189152) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', 
L['self'].single_transformer_blocks[3].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[3].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.norm_k.weight, 140581772786432) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[3].attn.norm_q, 140581770189008) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[3].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[3].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.norm_q.weight, 140581772783632) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[3].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[3].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.processor, 140581770188912) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if 
encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm, 140581770188576) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[3].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.norm, 140581770188720) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- 
GuardManager: source=L['self'].single_transformer_blocks[3].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.silu, 140581770188624) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.linear, 140533117469456) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[3].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[3].norm.linear.lora_A, 140533117475360) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.linear.lora_A['default_0'], 140533117482800) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.linear.lora_A['default_0'].weight, 140526663204624) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.linear.lora_B, 140533117476704) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.linear.lora_B['default_0'], 140533117480832) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.linear.base_layer, 140581770188672) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.linear.lora_dropout, 140533117472768) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.linear.lora_dropout['default_0'], 140533117473536) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[3].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[3].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[3].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[3].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[3].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[3].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[3].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[3].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
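Everything in the norm.linear subtree above falls out of tracing peft's LoRA Linear.forward. Reconstructed from the source lines the log itself quotes (peft/tuners/lora/layer.py:557-568), the traced path looks roughly like the sketch below; this is a paraphrase for orientation, not the verbatim peft code, and the loop header, the continue, and the final addition are assumptions:

def lora_linear_forward(self, x, *args, **kwargs):
    result = self.base_layer(x, *args, **kwargs)        # layer.py:557 -> ID_MATCH on base_layer
    for active_adapter in self.active_adapters:         # assumed loop; not quoted in the log
        if active_adapter not in self.lora_A.keys():    # layer.py:560 -> ID_MATCH on lora_A
            continue
        lora_A = self.lora_A[active_adapter]            # layer.py:562 -> ID_MATCH on lora_A['default_0']
        lora_B = self.lora_B[active_adapter]            # layer.py:563 -> ID_MATCH on lora_B['default_0']
        dropout = self.lora_dropout[active_adapter]     # layer.py:564 -> ID_MATCH on lora_dropout['default_0']
        scaling = self.scaling[active_adapter]          # layer.py:565 -> TYPE_MATCH/DICT_LENGTH/EQUALS_MATCH
        x = x.to(lora_A.weight.dtype)                   # layer.py:566 -> ID_MATCH on the lora_A weight
        if not self.use_dora[active_adapter]:           # layer.py:568 -> ID_MATCH on use_dora['default_0']
            result = result + lora_B(lora_A(dropout(x))) * scaling  # assumed; the standard LoRA update
    return result

Every attribute and dict lookup on that path is pinned by one guard above: ID_MATCH checks object identity via id(), TYPE_MATCH plus DICT_LENGTH pin the scaling and use_dora dicts, and EQUALS_MATCH pins the scale float itself.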
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].act_mlp, 140581770188816) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_mlp, 140533117474832) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[3].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_mlp.lora_A, 140533117475024) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_mlp.lora_A['default_0'], 140533117480592) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_mlp.lora_A['default_0'].weight, 140526663190864) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_mlp.lora_B, 140533117470704) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_mlp.lora_B['default_0'], 140533117476512) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_mlp.base_layer, 140581770188768) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_mlp.lora_dropout, 140533117470656) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_mlp.lora_dropout['default_0'], 140533117478240) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[3].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[3].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[3].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[3].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[3].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[3].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[3].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[3].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
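The TENSOR_ALIASING entries that close each LoRA subtree record that the layer's _active_adapter is the very same Python object as transformer_blocks[0].norm1.linear._active_adapter, so Dynamo asserts identity against the first occurrence instead of guarding the value again on every layer. A minimal analogue of how one shared attribute yields such a guard (illustrative only; the module and names are made up):

import torch

shared_state = ["default_0"]            # one Python object...

class TwoViews(torch.nn.Module):
    def __init__(self):
        super().__init__()
        self.a = shared_state           # ...reachable through two attributes
        self.b = shared_state

    def forward(self, x):
        return x + len(self.a) + len(self.b)

compiled = torch.compile(TwoViews())
compiled(torch.ones(2))
# With TORCH_LOGS="guards" set, the dump would be expected to show the second
# access collapsed into an identity check against the first, much like above.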
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_out, 140533117474640) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[3].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_out.lora_A, 140533117480448) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_out.lora_A['default_0'], 140533115124512) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_out.lora_A['default_0'].weight, 140533117600288) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_out.lora_B, 140533117476320) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_out.lora_B['default_0'], 140533115124992) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_out.base_layer, 140581770188864) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_out.lora_dropout, 140533117481408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_out.lora_dropout['default_0'], 140533117473680) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[3].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[3].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[3].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[3].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[3].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[3].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[3].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[3].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[3].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[3]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
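That closes single_transformer_blocks[3]: norm.linear, proj_mlp, and proj_out each contributed an essentially identical LoRA guard subtree, and every LoRA-wrapped Linear in transformer_blocks and single_transformer_blocks adds another one, so guard evaluation itself becomes a per-call cost on the compiled FluxTransformer2DModel. One commonly suggested mitigation, sketched here under the assumption of a diffusers Flux pipeline with the LoRA already trained or downloaded (exact API behavior varies by diffusers version), is to fuse the adapter into the base weights before compiling, so Dynamo never sees the per-adapter state guarded above:

import torch
from diffusers import FluxPipeline  # assumed setup matching the traced model

pipe = FluxPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16
).to("cuda")
pipe.load_lora_weights("path/to/lora")  # placeholder path
pipe.fuse_lora()                        # folds lora_B @ lora_A * scaling into the base weights
pipe.transformer = torch.compile(pipe.transformer)

With the adapter fused, the traced modules are plain Linears, so the lora_A/lora_B/scaling/use_dora guard subtrees (and the recompiles they can trigger) should not appear at all.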
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4], accessed_by=GetItemGuardAccessor(4)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4], 140581770188528) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[4].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn, 140581770189728) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[4].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_k, 140533116536720) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[4].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_k.lora_A, 140533116536432) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_k.lora_A['default_0'], 140533113991472) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_k.lora_A['default_0'].weight, 140526553676624) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_k.lora_B, 140533116549680) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_k.lora_B['default_0'], 140533113992816) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_k.base_layer, 140581770189872) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_k.lora_dropout, 140533116549632) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_k.lora_dropout['default_0'], 140533116535136) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[4].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[4].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: 
L['self'].single_transformer_blocks[4].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[4].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[4].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[4].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[4].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[4].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[4].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_q, 140533113858576) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[4].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.lora_A, 
accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_q.lora_A, 140533114149264) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_q.lora_A['default_0'], 140533116540368) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_q.lora_A['default_0'].weight, 140526553661984) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | 
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_q.lora_B, 140533115467616) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_q.lora_B['default_0'], 140533116535040) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_q.base_layer, 140581770189968) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_q.lora_dropout, 140533114154352) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_q.lora_dropout['default_0'], 140533114156944) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[4].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[4].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[4].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[4].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[4].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[4].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: 
___check_type_id(L['self'].single_transformer_blocks[4].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[4].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[4].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[4].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_v, 140533113982832) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[4].attn.to_v.__dict__) # forward_call = (self._slow_forward if 
torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_v.lora_A, 140533115495728) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_v.lora_A['default_0'], 140533115573424) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_v.lora_A['default_0'].weight, 140526553665664) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_v.lora_B, 140533115490208) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_v.lora_B['default_0'], 140533115576976) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_v.base_layer, 140581770190016) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_v.lora_dropout, 140533115488000) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_v.lora_dropout['default_0'], 140533115496016) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[4].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[4].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[4].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[4].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[4].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[4].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[4].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[4].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].single_transformer_blocks[4].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.norm_k, 140581770189920) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[4].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[4].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.norm_k.weight, 140581772771952) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | 
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.norm_q, 140581770189776) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[4].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[4].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.norm_q.weight, 140581783349632) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[4].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[4].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.processor, 140581770189680) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.forward, accessed_by=FuncDefaultsGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm, accessed_by=DictGetItemGuardAccessor(norm)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm, 140581770189344) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[4].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.norm, 140581770189488) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.silu, 140581770189392) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.linear, 140533115348656) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[4].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.linear.lora_A, 140533115344384) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.linear.lora_A['default_0'], 140533115348128) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.linear.lora_A['default_0'].weight, 140526553667184) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.linear.lora_B, 140533115345776) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.linear.lora_B['default_0'], 140533115350384) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.linear.base_layer, 140581770189440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.linear.lora_dropout, 140533115349232) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.linear.lora_dropout['default_0'], 140533115348704) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[4].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[4].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[4].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[4].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[4].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[4].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[4].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[4].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[4].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].act_mlp, 140581770189584) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_mlp, 140533115349664) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[4].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_mlp.lora_A, 140533115343040) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_mlp.lora_A['default_0'], 140533115352064) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_mlp.lora_A['default_0'].weight, 140526553675104) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_mlp.lora_B, 140533115343424) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_mlp.lora_B['default_0'], 140533115341216) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_mlp.base_layer, 140581770189536) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_mlp.lora_dropout, 140533115339968) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_mlp.lora_dropout['default_0'], 140533115349904) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[4].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[4].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[4].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[4].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[4].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[4].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[4].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[4].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[4].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_out, 140533115351008) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[4].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_out.lora_A, 140533115441872) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_out.lora_A['default_0'], 140533115444032) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_out.lora_A['default_0'].weight, 140526553674144) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_out.lora_B, 140533115449408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_out.lora_B['default_0'], 140533115445184) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_out.base_layer, 140581770189632) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_out.lora_dropout, 140533115442064) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_out.lora_dropout['default_0'], 140533115443840) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[4].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[4].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[4].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[4].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[4].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[4].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[4].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[4].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[4].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[4].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[4]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5], accessed_by=GetItemGuardAccessor(5)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5], 140581770189296) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[5].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn, accessed_by=DictGetItemGuardAccessor(attn)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn, 140581770190496) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.__dict__,
accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[5].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_k, 140533113727408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[5].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_k.lora_A, 140533113729376) # if active_adapter 
not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_k.lora_A['default_0'], 140533113718384) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_k.lora_A['default_0'].weight, 140526770031008) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_k.lora_B, 140533113719056) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_k.lora_B['default_0'], 140533113726976) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_k.base_layer, 140581770190640) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_k.lora_dropout, 140533113731776) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_k.lora_dropout['default_0'], 140533113724144) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: 
___check_type_id(L['self'].single_transformer_blocks[5].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[5].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[5].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[5].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[5].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[5].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[5].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[5].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[5].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_q, 140533113731728) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[5].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[5].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_q.lora_A, 140533113718624) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_q.lora_A['default_0'], 140533113731440) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_q.lora_A['default_0'].weight, 140526770040928) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_q.lora_B, 140533113718336) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_q.lora_B['default_0'], 140533113731488) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 
in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_q.base_layer, 140581770190736) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_q.lora_dropout, 140533113729424) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_q.lora_dropout['default_0'], 140533113724192) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[5].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[5].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[5].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[5].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[5].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[5].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[5].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[5].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[5].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[5].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v, 
accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_v, 140533114276688) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[5].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_v.lora_A, 140533114274288) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_v.lora_A['default_0'], 140533114273904) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_v.lora_A['default_0'].weight, 140537655632304) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_v.lora_B, 140533114275920) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_v.lora_B['default_0'], 140533114275872) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_v.base_layer, 140581770190784) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_v.lora_dropout, 140533114275488) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.lora_dropout.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_v.lora_dropout['default_0'], 140533114276448) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[5].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[5].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[5].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | 
| | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[5].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[5].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[5].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[5].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 
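The guards above specialize the PEFT LoRA wrapper around attn.to_v: every Python-level attribute the LoRA forward touches at trace time (lora_A, lora_B, lora_dropout, scaling, use_dora, merged_adapters, _disable_adapters, _active_adapter) gets its own ID_MATCH / TYPE_MATCH / EQUALS_MATCH / LENGTH_CHECK entry. A minimal sketch of that forward path, reconstructed only from the source lines quoted in the guard comments (peft/tuners/lora/layer.py:557-568); the class name and constructor here are illustrative, not peft's actual API:

import torch
import torch.nn as nn

class LoraLinearSketch(nn.Module):
    # Hypothetical stand-in for peft's LoRA Linear; only the attribute reads in
    # forward() mirror the lines quoted in the guards above.
    def __init__(self, base_layer: nn.Linear, r: int = 16):
        super().__init__()
        self.base_layer = base_layer
        self.lora_A = nn.ModuleDict({"default_0": nn.Linear(base_layer.in_features, r, bias=False)})
        self.lora_B = nn.ModuleDict({"default_0": nn.Linear(r, base_layer.out_features, bias=False)})
        self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})  # dropout p=0.0
        self.scaling = {"default_0": 1.0}       # guarded by EQUALS_MATCH == 1.0
        self.use_dora = {"default_0": False}    # guarded by ID_MATCH on False (layer.py:568)
        self.merged_adapters = []               # guarded by LENGTH_CHECK (must stay empty)
        self._disable_adapters = False          # guarded by ID_MATCH on False
        self.active_adapters = ["default_0"]    # the list aliased across every LoRA layer

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        result = self.base_layer(x)                           # layer.py:557
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():      # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]              # layer.py:562
            lora_B = self.lora_B[active_adapter]              # layer.py:563
            dropout = self.lora_dropout[active_adapter]       # layer.py:564
            scaling = self.scaling[active_adapter]            # layer.py:565
            x = x.to(lora_A.weight.dtype)                     # layer.py:566
            if not self.use_dora[active_adapter]:             # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Dynamo cannot constant-fold any of these dict and bool reads without pinning them, so each wrapped projection contributes this roughly twenty-guard pattern; repeated over every attention and MLP projection in transformer_blocks and single_transformer_blocks, that accounts for most of the tree's volume.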
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[5].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.norm_k, 140581770190688) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[5].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[5].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager:
source=L['self'].single_transformer_blocks[5].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.norm_k.weight, 140581772502624) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.norm_q, 140581770190544) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[5].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: 
L['self'].single_transformer_blocks[5].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.norm_q.weight, 140581783352032) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[5].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[5].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.processor, 140581770190448) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | 
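The norm_k and norm_q guards above pin eps by value (EQUALS_MATCH == 1e-06) and weight by object identity, because both are read inside the RMS-norm forward the comments quote. A hedged sketch of that computation, assuming the standard RMS-norm formulation around the two quoted lines (diffusers/src/diffusers/models/normalization.py:428 and :430); everything else is filled in:

import torch
import torch.nn as nn

class RMSNormSketch(nn.Module):
    # Assumed surroundings; only the lines tagged :428 and :430 appear in the log.
    def __init__(self, dim: int, eps: float = 1e-6, elementwise_affine: bool = True):
        super().__init__()
        self.eps = eps  # EQUALS_MATCH: eps == 1e-06
        self.weight = nn.Parameter(torch.ones(dim)) if elementwise_affine else None

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        variance = hidden_states.pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + self.eps)  # normalization.py:428
        if self.weight is not None:  # normalization.py:430 -> ID_MATCH on the weight object
            hidden_states = hidden_states * self.weight
        return hidden_states

Note the asymmetry: a different eps value or a re-created weight Parameter (a new object id) would each invalidate its guard and trigger a recompile, even though only the former changes the math.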
| | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm, 140581770190112) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[5].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.training, 140591004393440) # norm_hidden_states, gate = 
self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.norm, 140581770190256) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.silu, 140581770190160) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.linear, 140533115581872) # emb = self.linear(self.silu(emb)) # 
diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[5].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.linear.lora_A, 140533115580192) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.linear.lora_A['default_0'], 140533115579280) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[5].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.linear.lora_A['default_0'].weight, 140526770029888) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.linear.lora_B, 140533115582496) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.linear.lora_B['default_0'], 140533115581248) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.linear.base_layer, 140581770190208) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.linear.lora_dropout, 140533115584464) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.linear.lora_dropout.training, 
140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.linear.lora_dropout['default_0'], 140533115583024) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[5].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[5].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[5].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[5].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[5].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[5].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[5].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[5].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].act_mlp, 140581770190352) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_mlp, 140533115430384) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) #
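The norm subtree above (silu -> linear -> norm, quoting normalization.py:169 and :171) matches diffusers' AdaLayerNormZeroSingle, whose linear is itself LoRA-wrapped in this model. A sketch assembled around the two quoted lines; the chunk-into-three step is an assumption inferred from the shift_msa/scale_msa/gate names and the two return values, not something the log shows:

import torch
import torch.nn as nn

class AdaLayerNormZeroSingleSketch(nn.Module):
    # Hypothetical reconstruction; only :169 and :171 are quoted in the guards.
    def __init__(self, embedding_dim: int):
        super().__init__()
        self.silu = nn.SiLU()
        self.linear = nn.Linear(embedding_dim, 3 * embedding_dim)
        self.norm = nn.LayerNorm(embedding_dim, elementwise_affine=False, eps=1e-6)

    def forward(self, x: torch.Tensor, emb: torch.Tensor):
        emb = self.linear(self.silu(emb))                                  # normalization.py:169
        shift_msa, scale_msa, gate_msa = emb.chunk(3, dim=1)               # assumed split
        x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]   # normalization.py:171
        return x, gate_msa  # matches "norm_hidden_states, gate = self.norm(...)" at the call site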
diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[5].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_mlp.lora_A, 140533115433264) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_mlp.lora_A['default_0'], 140533113727696) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[5].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_mlp.lora_A['default_0'].weight, 140526770041008) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_mlp.lora_B, 140533115422848) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_mlp.lora_B['default_0'], 140533113730336) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[5].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_mlp.base_layer, 140581770190304) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_mlp.lora_dropout, 140533115427552) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- 
GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_mlp.lora_dropout['default_0'], 140533115423808) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[5].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[5].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[5].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[5].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[5].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | 
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[5].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[5].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[5].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_out, 140533113719296) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[5].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_out.lora_A, 140533113719008) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_out.lora_A['default_0'], 140533113728992) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_out.lora_A['default_0'].weight, 140526770043728) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_out.lora_B, 140533113722560) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_out.lora_B['default_0'], 140533113724096) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_out.base_layer, 140581770190400) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_out.lora_dropout, 140533113731536) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_out.lora_dropout['default_0'], 140533113722512) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[5].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[5].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[5].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[5].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[5].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[5].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[5].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[5].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 
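[editor's note] The scaling guards above are EQUALS_MATCH on the concrete float value (== 1.0), not just a type check, so anything that rewrites scaling['default_0'] between calls fails the guard and forces a fresh compile of this frame. A minimal sketch of how to surface that, assuming hypothetical stand-ins 'transformer' and 'inputs' for the real objects in this trace:

    import torch

    # 'recompiles' logs just the failing guard; 'guards' reproduces full
    # dumps like this one. set_logs is the in-process equivalent of the
    # TORCH_LOGS environment variable.
    torch._logging.set_logs(recompiles=True)

    compiled = torch.compile(transformer)   # hypothetical module handle
    out = compiled(**inputs)                # installs the scaling == 1.0 guard
    # ...anything that moves scaling['default_0'] away from 1.0 here...
    out = compiled(**inputs)                # this call logs the failed EQUALS_MATCH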
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[5].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[5].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[5]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6], accessed_by=GetItemGuardAccessor(6) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6], 140581770190064) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[6].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | 
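[editor's note] The recurring "DICT_CONTAINS: not ___dict_contains('forward', ...)" guards exist because nn.Module._call_impl dispatches through self.forward dynamically: dynamo must pin down that no per-instance 'forward' override was installed after tracing. A hand-written version of what the guard checks, with 'model' as a hypothetical handle to the compiled module:

    # What the DICT_CONTAINS guard verifies for each submodule.
    block = model.single_transformer_blocks[6]   # hypothetical access path
    assert "forward" not in vars(block)          # the guarded condition
    # Monkey-patching an instance would put 'forward' into block.__dict__
    # and fail this guard on the next call:
    block.forward = lambda *args, **kwargs: None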
| | | | +- GuardManager: source=L['self'].single_transformer_blocks[6]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn, 140581770191264) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[6].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_k, 140533113769744) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[6].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_k.training, 140591004393408) 
# key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_k.lora_A, 140533113774832) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_k.lora_A['default_0'], 140533113781648) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[6].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_k.lora_A['default_0'].weight, 140526768414592) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_k.lora_B, 140533113773440) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_k.lora_B['default_0'], 140533113769408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_k.base_layer, 140581770191408) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_k.lora_dropout, 140533113773536) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_k.lora_dropout['default_0'], 140533113774448) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.lora_dropout['default_0'].training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[6].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[6].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[6].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[6].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[6].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[6].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[6].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[6].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[6].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[6].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_q, 140533113777952) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 
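[editor's note] The paired TENSOR_ALIASING entries assert object identity ('is'), not value equality: in this run every LoRA layer's _active_adapter is literally the same Python object as the one on transformer_blocks[0].norm1.linear, and the guard re-verifies that identity each call (it is emitted as a tensor-aliasing guard even though _active_adapter holds adapter names). Spelled out in plain Python, with 'model' again a hypothetical handle:

    # The identity relation the TENSOR_ALIASING guards re-check.
    anchor = model.transformer_blocks[0].norm1.linear._active_adapter
    other = model.single_transformer_blocks[6].attn.to_k._active_adapter
    assert anchor is other   # identity ('is'), not equality ('==')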
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[6].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_q.lora_A, 140533113777520) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_q.lora_A['default_0'], 140533113768736) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.lora_A['default_0'].__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_q.lora_A['default_0'].weight, 140537655623984) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_q.lora_B, 140533113780496) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_q.lora_B['default_0'], 140533113779728) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[6].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_q.base_layer, 140581770191504) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_q.lora_dropout, 140533113768016) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_q.lora_dropout['default_0'], 140533113780880) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[6].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[6].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[6].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[6].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[6].attn.to_q.use_dora) == 1 # if not 
self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[6].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[6].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].single_transformer_blocks[6].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[6].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_v, 140533113781120) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[6].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_v.lora_A, 140533113776800) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | 
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_v.lora_A['default_0'], 140533113781312) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_v.lora_A['default_0'].weight, 140526768411232) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_v.lora_B, 140533113780208) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.lora_B.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_v.lora_B['default_0'], 140533113780064) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_v.base_layer, 140581770191552) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_v.lora_dropout, 140533113780256) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_v.lora_dropout['default_0'], 140533113774784) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[6].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[6].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.scaling['default_0'], 
accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[6].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[6].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[6].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[6].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[6].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[6].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[6].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[6].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.norm_k, 140581770191456) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[6].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_k.eps, 
accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[6].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.norm_k.weight, 140581773356832) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.norm_q, 140581770191312) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[6].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_q.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[6].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.norm_q.weight, 140581773349152) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[6].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[6].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.processor, 140581770191216) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm, 140581770190880) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[6].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.norm, 140581770191024) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.silu, 140581770190928) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.linear, 140533114275584) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[6].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.linear.lora_A, 140533114275392) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # 
peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.linear.lora_A['default_0'], 140533113601424) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.linear.lora_A['default_0'].weight, 140537655630624) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.linear.lora_B, 140533113590000) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[6].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.linear.lora_B['default_0'], 140533113594752) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.linear.base_layer, 140581770190976) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.linear.lora_dropout, 140533114286384) # dropout = self.lora_dropout[active_adapter] # 
peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.linear.lora_dropout['default_0'], 140533114275440) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[6].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[6].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | 
| | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[6].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[6].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[6].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[6].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[6].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[6].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[6].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].act_mlp, 140581770191120) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] 
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_mlp, 140533113776224) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[6].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_mlp.lora_A, 140533113775600) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_mlp.lora_A['default_0'], 140533113772768) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_mlp.lora_A['default_0'].weight, 140537655639264) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_mlp.lora_B, 140533113774640) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_mlp.lora_B['default_0'], 140533113776176) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_mlp.base_layer, 140581770191072) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_mlp.lora_dropout, 140533113775840) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_mlp.lora_dropout['default_0'], 140533113774592) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[6].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[6].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[6].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[6].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[6].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[6].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[6].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[6].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[6].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
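The proj_mlp subtree that closes here is the guard image of one PEFT LoRA Linear forward. A sketch reconstructed from the source lines the guards quote (peft/tuners/lora/layer.py:557-568 and peft/tuners/tuners_utils.py:506-516); this is a paraphrase, not the verbatim PEFT code:

    def lora_linear_forward(self, x, *args, **kwargs):
        # every attribute touched below appears above as a guard
        result = self.base_layer(x, *args, **kwargs)       # layer.py:557
        for active_adapter in self.active_adapters:        # -> tuners_utils.py:516
            if active_adapter not in self.lora_A.keys():   # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]           # layer.py:562
            lora_B = self.lora_B[active_adapter]           # layer.py:563
            dropout = self.lora_dropout[active_adapter]    # layer.py:564
            scaling = self.scaling[active_adapter]         # layer.py:565
            x = x.to(lora_A.weight.dtype)                  # layer.py:566
            if not self.use_dora[active_adapter]:          # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

The merged and disable_adapters properties guarded above are one-line accessors (bool(self.merged_adapters) at tuners_utils.py:506, self._disable_adapters at :511), which is why each costs only a TYPE_MATCH/LENGTH_CHECK or a single ID_MATCH.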
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_out, 140533113775888) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[6].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_out.lora_A, 140533113778000) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_out.lora_A['default_0'], 140533113775504) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_out.lora_A['default_0'].weight, 140537655627904) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_out.lora_B, 140533113777616) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_out.lora_B['default_0'], 140533113773248) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_out.base_layer, 140581770191168) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_out.lora_dropout, 140533113772960) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_out.lora_dropout['default_0'], 140533113780592) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[6].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[6].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[6].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
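Note the scaling guard trio that repeats for every LoRA layer: the dict is pinned by type and length, and the entry by value. In effect the compiled graph assumes this shape (a minimal restatement of the three checks, not Dynamo code):

    scaling = {'default_0': 1.0}
    # TYPE_MATCH, DICT_LENGTH, and EQUALS_MATCH respectively:
    assert type(scaling) is dict
    assert len(scaling) == 1
    assert scaling['default_0'] == 1.0

Because EQUALS_MATCH compares by value on every call, running the pipeline with a different effective LoRA scale (which rewrites each layer's scaling entry) fails this guard set and forces another compile, consistent with the [0/3] tag marking a recompile of frame 0.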
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[6].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[6].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[6].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[6].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[6].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[6].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[6].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[6]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
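With block [6] fully guarded, the same pattern starts over for single_transformer_blocks[7]. The quoted source line (transformer_flux.py:509) is a plain Python loop; Dynamo unrolls it during tracing, so each iteration pins its own block object with an ID_MATCH and repeats the entire per-layer subtree, which is why the dump grows with model depth. An illustrative sketch of the unrolled loop (not the diffusers code verbatim):

    def run_single_blocks(self, hidden_states, temb, image_rotary_emb):
        # traced per iteration: `block` is a concrete module object each time,
        # so the guards record ID_MATCH(single_transformer_blocks[i], <obj id>);
        # the DICT_CONTAINS checks come from nn/modules/module.py:1556, which
        # guards that no instance-level `forward` override has been added
        for index_block, block in enumerate(self.single_transformer_blocks):  # transformer_flux.py:509
            hidden_states = block(hidden_states, temb=temb, image_rotary_emb=image_rotary_emb)
        return hidden_states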
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7], accessed_by=GetItemGuardAccessor(7)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7], 140581770190832) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[7].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn, 140581770192032) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[7].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_k, 140533114123792) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[7].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_k.lora_A, 140533114551648) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_k.lora_A['default_0'], 140533114543296) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_k.lora_A['default_0'].weight, 140526776618896) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_k.lora_B, 140533114539792) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_k.lora_B['default_0'], 140533114552224) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_k.base_layer, 140581770192176) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_k.lora_dropout, 140533114551360) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_k.lora_dropout['default_0'], 140533114111216) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[7].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[7].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[7].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[7].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[7].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[7].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[7].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[7].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[7].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
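The to_k subtree ends here and to_q follows with an identical layout. Both are the attention projections invoked at the call sites the guards quote (diffusers/src/diffusers/models/attention_processor.py:1716-1717); each is a LoRA-wrapped Linear, so the whole per-layer guard pattern repeats once per projection. A paraphrase of just those two calls, wrapped in a hypothetical helper for illustration (the surrounding processor logic, value projection, and rotary embeddings are omitted):

    def project_query_key(attn, hidden_states):
        # each call dispatches through the PEFT LoRA wrapper guarded above
        query = attn.to_q(hidden_states)  # attention_processor.py:1716 in __call__
        key = attn.to_k(hidden_states)    # attention_processor.py:1717 in __call__
        return query, key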
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_q, 140533114310272) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[7].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_q.lora_A, 140533114321504) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_q.lora_A['default_0'], 140533114124224) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_q.lora_A['default_0'].weight, 140526776615776) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_q.lora_B, 140533114309936) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_q.lora_B['default_0'], 140533114121536) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_q.base_layer, 140581770192272) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_q.lora_dropout,
140533114308832) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_q.lora_dropout['default_0'], 140533114309600) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[7].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[7].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[7].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[7].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[7].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[7].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[7].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 
in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[7].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[7].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_v, 140533114552080) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[7].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[7].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_v.lora_A, 140533114551120) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_v.lora_A['default_0'], 140533114079264) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_v.lora_A['default_0'].weight, 140526268535872) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_v.lora_B, 140533114541376) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_v.lora_B['default_0'], 140533114081808) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_v.base_layer, 140581770192320) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.base_layer.__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_v.lora_dropout, 140533114541520) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_v.lora_dropout['default_0'], 140533114548096) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[7].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[7].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[7].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[7].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[7].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[7].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[7].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[7].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[7].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.norm_k, 140581770192224) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', 
L['self'].single_transformer_blocks[7].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[7].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.norm_k.weight, 140581765865344) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[7].attn.norm_q, 140581770192080) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[7].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[7].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.norm_q.weight, 140581783351872) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[7].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[7].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.processor, 140581770191984) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if 
encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm, 140581770191648) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[7].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.norm, 140581770191792) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- 
GuardManager: source=L['self'].single_transformer_blocks[7].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.silu, 140581770191696) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.linear, 140533115451376) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[7].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[7].norm.linear.lora_A, 140533115696368) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
[the per-record log prefix "V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards]" repeats on every guard line below and is elided]
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.linear.lora_A['default_0'], 140533115176704) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.linear.lora_A['default_0'].weight, 140526768416752) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.linear.lora_B, 140533115176320) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.linear.lora_B['default_0'], 140533115181168) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.linear.base_layer, 140581770191744) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.linear.lora_dropout, 140533115695408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.linear.lora_dropout['default_0'], 140533115685232) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[7].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[7].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[7].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[7].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[7].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[7].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[7].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[7].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[7].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].act_mlp, 140581770191888) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_mlp, 140533115182800) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[7].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | [... LoRA adapter guard subtree for proj_mlp (lora_A, lora_B, base_layer, lora_dropout with their 'default_0' and .training ID_MATCHes, scaling['default_0'] == 1.0, use_dora['default_0'] False, empty merged_adapters, _disable_adapters False, hook-dict GuardManagers, _active_adapter TENSOR_ALIASING to L['self'].transformer_blocks[0].norm1.linear._active_adapter), identical in structure to the norm.linear subtree above; only the object ids differ ...]
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_out, 140533115180400) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[7].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[7].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | [... same LoRA adapter guard subtree for proj_out; only the object ids differ ...]
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[7]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8], accessed_by=GetItemGuardAccessor(8)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8], 140581770191600) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[8].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn, 140581770192800) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[8].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_k, 140533112541760) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[8].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | [... same LoRA adapter guard subtree for attn.to_k (lora_A, lora_B, base_layer, lora_dropout, scaling; object ids differ); the log excerpt ends truncated mid-guard at the scaling['default_0'] check: ...]
| | | | | | | | | | | | | | | +- EQUALS_MATCH:
L['self'].single_transformer_blocks[8].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[8].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[8].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[8].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[8].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[8].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[8].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_q, 140533112540224) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[8].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.lora_A, 
accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_q.lora_A, 140533112544688) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_q.lora_A['default_0'], 140533112540800) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_q.lora_A['default_0'].weight, 140526268541792) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | 
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_q.lora_B, 140533112544976) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_q.lora_B['default_0'], 140533112544640) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_q.base_layer, 140581770193040) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_q.lora_dropout, 140533112540752) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_q.lora_dropout['default_0'], 140533112538640) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[8].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[8].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[8].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[8].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[8].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[8].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: 
___check_type_id(L['self'].single_transformer_blocks[8].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[8].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[8].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[8].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_v, 140533112538784) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[8].attn.to_v.__dict__) # forward_call = (self._slow_forward if 
torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_v.lora_A, 140533112543296) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_v.lora_A['default_0'], 140533112543440) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_v.lora_A['default_0'].weight, 140526268538992) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_v.lora_B, 140533112541904) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_v.lora_B['default_0'], 140533116695264) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_v.base_layer, 140581770193088) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_v.lora_dropout, 140533112542000) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_v.lora_dropout['default_0'], 140533112540992) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[8].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[8].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[8].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[8].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[8].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[8].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[8].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[8].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].single_transformer_blocks[8].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.norm_k, 140581770192992) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[8].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[8].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.norm_k.weight, 140581766103104) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | 
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.norm_q, 140581770192848) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[8].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[8].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.norm_q.weight, 140581773350752) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[8].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[8].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.processor, 140581770192752) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[8].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
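The eps and weight guards a few records up pin the attention RMSNorm layers (norm_q/norm_k) down to attribute level: an EQUALS_MATCH on eps == 1e-06 and an ID_MATCH on the optional weight, each annotated with the diffusers source line it protects (normalization.py:428 and 430). Because eps is a plain Python float read during tracing, Dynamo inlines its value as a constant and must guard on exact equality; changing eps on the module would fail the guard and force a recompile. A minimal sketch of the guarded normalization, reconstructed from the two quoted lines only (the variance statistic is the standard RMSNorm assumption, not text from this log):

    import torch

    def rms_norm(hidden_states: torch.Tensor, weight, eps: float = 1e-6) -> torch.Tensor:
        # eps is guarded by EQUALS_MATCH: it becomes a trace-time constant
        variance = hidden_states.pow(2).mean(-1, keepdim=True)  # assumed: the usual RMS statistic
        hidden_states = hidden_states * torch.rsqrt(variance + eps)  # normalization.py:428 (quoted above)
        if weight is not None:  # normalization.py:430 (quoted above); the weight object itself is ID_MATCH-guarded
            hidden_states = hidden_states * weight
        return hidden_states

    y = rms_norm(torch.randn(2, 16, 128), weight=torch.ones(128))

V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm, 140581770192416) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[8].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.norm,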
140581770192560) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.silu, 140581770192464) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.linear, 140533114090016) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[8].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[8].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.linear.lora_A, 140533114088192) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.linear.lora_A['default_0'], 140533112543104) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.linear.lora_A['default_0'].weight, 140526268546672) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.linear.lora_B, 140533112540512) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.linear.lora_B['default_0'], 140533112548624) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B 
= self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.linear.base_layer, 140581770192512) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.linear.lora_dropout, 140533114077920) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.linear.lora_dropout['default_0'], 140533114093376) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[8].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[8].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[8].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[8].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[8].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.linear.use_dora['default_0'], 140591004393440) # if not 
self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[8].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[8].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[8].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[8].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
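That closes a complete guard cluster for one PEFT-wrapped Linear (norm.linear): ID_MATCH on lora_A/lora_B['default_0'] and their weights, EQUALS_MATCH on scaling['default_0'] == 1.0, a LENGTH_CHECK that merged_adapters is empty, and False checks on use_dora['default_0'] and _disable_adapters, plus TENSOR_ALIASING asserting that every layer shares one _active_adapter list. Stitching together the peft/tuners/lora/layer.py lines the guards quote (557-568) gives roughly the forward path Dynamo traced here; the adapter loop and the final accumulation line are assumptions about how those quoted lines connect, not text from this log:

    # Sketch of the unmerged LoRA forward implied by the guarded lines (peft layer.py:557-568).
    def lora_linear_forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)      # layer.py:557; merged_adapters is guarded empty
        for active_adapter in self.active_adapters:       # assumed loop; _active_adapter is aliasing-guarded
            if active_adapter not in self.lora_A.keys():  # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]          # layer.py:562
            lora_B = self.lora_B[active_adapter]          # layer.py:563
            dropout = self.lora_dropout[active_adapter]   # layer.py:564
            scaling = self.scaling[active_adapter]        # layer.py:565; EQUALS_MATCH pins 1.0 here
            x = x.to(lora_A.weight.dtype)                 # layer.py:566
            if not self.use_dora[active_adapter]:         # layer.py:568; guarded False, so only this branch was traced
                result = result + lora_B(lora_A(dropout(x))) * scaling  # assumed accumulation
        return result

Because scaling is a Python float, changing the LoRA scale (for example through set_adapters) fails the EQUALS_MATCH and retriggers compilation; the [0/3] tag on every record marks this as already the fourth compiled version of frame 0.

V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148]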
[0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].act_mlp, 140581770192656) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_mlp, 140533112552416) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[8].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- 
GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_mlp.lora_A, 140533112541232) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_mlp.lora_A['default_0'], 140533112552656) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | 
| +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_mlp.lora_A['default_0'].weight, 140526268545152) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_mlp.lora_B, 140533112538208) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_mlp.lora_B['default_0'], 140533112551456) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- 
GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_mlp.base_layer, 140581770192608) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_mlp.lora_dropout, 140533112551840) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_mlp.lora_dropout['default_0'], 140533112552512) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | 
| +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[8].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[8].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[8].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[8].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[8].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- 
GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[8].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[8].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[8].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[8].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
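With proj_mlp done, every submodule this subtree has walked (norm, act_mlp, proj_mlp, attn, and proj_out next) corresponds to one line of FluxSingleTransformerBlock.forward, each quoted in the guard comments (transformer_flux.py:88-98). Reassembling only those quoted lines gives the skeleton below; the residual bookkeeping and the concatenation of the attention and MLP branches are assumptions, not text from this log:

    import torch

    # Skeleton of FluxSingleTransformerBlock.forward from the quoted source lines.
    def flux_single_block_forward(self, hidden_states, temb, image_rotary_emb=None):
        residual = hidden_states                                             # assumed
        norm_hidden_states, gate = self.norm(hidden_states, emb=temb)        # transformer_flux.py:88
        mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states))  # transformer_flux.py:89
        attn_output = self.attn(hidden_states=norm_hidden_states,            # transformer_flux.py:91
                                image_rotary_emb=image_rotary_emb)
        hidden_states = torch.cat([attn_output, mlp_hidden_states], dim=2)   # assumed join of the branches
        hidden_states = gate * self.proj_out(hidden_states)                  # transformer_flux.py:98
        return residual + hidden_states                                      # assumed

V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_out, 140533112543680) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496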
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[8].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_out.lora_A, 140533112550784) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_out.lora_A['default_0'], 140533112550592) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] 
[0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_out.lora_A['default_0'].weight, 140526268542272) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_out.lora_B, 140533112550256) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_out.lora_B['default_0'], 140533112545792) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_out.base_layer, 140581770192704) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_out.lora_dropout, 140533112546128) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] 
[0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_out.lora_dropout['default_0'], 140533112546368) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[8].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[8].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[8].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[8].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[8].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[8].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[8].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[8].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[8].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[8].proj_out._active_adapter # return self._active_adapter # 
peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[8]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9], accessed_by=GetItemGuardAccessor(9) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9], 140581770192368) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[9].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn, 140581770193568) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[9].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_k, 140533112559104) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[9].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_k.lora_A, 140533112560544) # if active_adapter 
not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_k.lora_A['default_0'], 140533112560592) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_k.lora_A['default_0'].weight, 140526268530752) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_k.lora_B, 140533112566208) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_k.lora_B['default_0'], 140533112560208) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_k.base_layer, 140581770193712) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_k.lora_dropout, 140533112558912) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_k.lora_dropout['default_0'], 140533112556752) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: 
___check_type_id(L['self'].single_transformer_blocks[9].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[9].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[9].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[9].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[9].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[9].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[9].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_q, 140533112565728) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[9].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[9].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_q.lora_A, 140533112559968) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_q.lora_A['default_0'], 140533112560016) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_q.lora_A['default_0'].weight, 140526268532752) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_q.lora_B, 140533112568368) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_q.lora_B['default_0'], 140533112567264) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 
in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_q.base_layer, 140581770193808) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_q.lora_dropout, 140533112565920) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_q.lora_dropout['default_0'], 140533112566016) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[9].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[9].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[9].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[9].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[9].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[9].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[9].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[9].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v, 
accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_v, 140533112558768) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[9].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_v.lora_A, 140533112557280) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_v.lora_A['default_0'], 140533112559776) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_v.lora_A['default_0'].weight, 140526268537312) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_v.lora_B, 140533112561024) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_v.lora_B['default_0'], 140533112556944) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_v.base_layer, 140581770193856) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_v.lora_dropout, 140533112561408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.lora_dropout.training, 
accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_v.lora_dropout['default_0'], 140533112556800) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[9].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[9].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[9].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[9].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[9].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[9].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[9].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
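The block above is one complete guard set for a single peft LoRA linear (attn.to_v of single_transformer_blocks[9]): Dynamo pins the ModuleDict objects by identity (ID_MATCH), the dict types and lengths (TYPE_MATCH / DICT_LENGTH), the scaling value (EQUALS_MATCH == 1.0), the use_dora flag, the empty merged_adapters list, and _disable_adapters. The sketch below is a simplified reconstruction, from the source lines the log cites (peft/tuners/lora/layer.py:557-568 and peft/tuners/tuners_utils.py:506-516), of the forward path those guards specialize; it is illustrative, not peft's verbatim code.

    # Simplified sketch of the guarded LoRA forward path (not verbatim peft code).
    def lora_linear_forward(self, x, *args, **kwargs):
        if self.disable_adapters or self.merged:           # guards: _disable_adapters (ID_MATCH),
            return self.base_layer(x, *args, **kwargs)     #         merged_adapters (LENGTH_CHECK: empty)
        result = self.base_layer(x, *args, **kwargs)       # layer.py:557
        for active_adapter in self.active_adapters:        # guard: _active_adapter (aliasing, see below)
            if active_adapter not in self.lora_A.keys():   # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]           # layer.py:562
            lora_B = self.lora_B[active_adapter]           # layer.py:563
            dropout = self.lora_dropout[active_adapter]    # layer.py:564
            scaling = self.scaling[active_adapter]         # layer.py:565, EQUALS_MATCH pins 1.0
            x = x.to(lora_A.weight.dtype)                  # layer.py:566
            if not self.use_dora[active_adapter]:          # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Because scaling['default_0'] is guarded with EQUALS_MATCH == 1.0, changing the adapter scale after compilation (for example via a different lora_scale) fails this guard and forces a recompile; the value is baked into the compiled graph as a constant.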
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
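TENSOR_ALIASING here is Dynamo's aliasing guard: despite the name, the object being checked is peft's _active_adapter attribute (the value returned by the active_adapter property at tuners_utils.py:516), not a tensor. The guard asserts that this layer still references the exact same object as transformer_blocks[0].norm1.linear, so one identity check stands in for a per-layer equality check; the guard line is printed twice verbatim in this dump. A hypothetical illustration (layer_a and layer_b are stand-ins, not names from the log):

    from types import SimpleNamespace

    layer_a, layer_b = SimpleNamespace(), SimpleNamespace()
    shared = ["default_0"]
    layer_a._active_adapter = shared
    layer_b._active_adapter = shared
    # What the aliasing guard checks: same object, not merely equal contents.
    assert layer_a._active_adapter is layer_b._active_adapter
    layer_b._active_adapter = ["default_0"]        # equal but not identical:
    assert layer_a._active_adapter is not layer_b._active_adapter  # guard would fail -> recompile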
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.norm_k, 140581770193760) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[9].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[9].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.norm_k.weight, 140581765867824) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.norm_q, 140581770193616) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[9].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[9].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.norm_q.weight, 140581773357792) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[9].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[9].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.processor, 140581770193520) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
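The norm_k/norm_q guards pin the attention's RMSNorm modules: EQUALS_MATCH freezes eps == 1e-06 as a compile-time constant, ID_MATCH on weight pins the exact parameter object, and attn.heads == 24 is likewise burned in (head_dim = inner_dim // attn.heads at attention_processor.py:1721). A minimal sketch of the normalization the cited lines (diffusers normalization.py:428-430) describe; the real module's dtype handling is omitted:

    import torch

    def rms_norm(hidden_states, weight=None, eps=1e-6):
        # normalization.py:428: scale by the reciprocal RMS over the last dim
        variance = hidden_states.pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + eps)
        if weight is not None:                      # normalization.py:430
            hidden_states = hidden_states * weight
        return hidden_states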
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.forward, accessed_by=FuncDefaultsGuardAccessor
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
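FuncDefaultsGuardAccessor guards the function's __defaults__ tuple: the trace took the encoder_hidden_states-is-None branch at attention_processor.py:1713, so Dynamo ID-matches forward.__defaults__[0] against that default object. A hypothetical illustration of what is being protected (a toy function, not the diffusers signature):

    def forward(hidden_states, encoder_hidden_states=None):
        # The compiled graph baked in this branch; the guard on
        # forward.__defaults__[0] keeps that choice valid.
        if encoder_hidden_states is None:
            batch_size = hidden_states.shape[0]
        else:
            batch_size = encoder_hidden_states.shape[0]
        return batch_size

    print(forward.__defaults__)  # (None,) -- the tuple Dynamo indexes with GetItemGuardAccessor(0)

Rebinding a default (or wrapping forward with a new function object) changes __defaults__ and invalidates the compiled entry.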
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm, 140581770193184) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[9].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.norm, 140581770193328) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.silu, 140581770193232) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
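This subtree covers the block's adaptive-norm modulation: norm.silu and norm.linear derive shift/scale/gate from the conditioning embedding (normalization.py:169), and norm.norm applies the modulated LayerNorm (normalization.py:171). Note that norm.linear.training (below) is ID-matched against a different boolean singleton than its neighbors: the LoRA-wrapped linears consistently carry one value and the frozen base modules the other, most plausibly training=True on the adapter wrappers versus eval mode elsewhere. A minimal sketch assembled from the two cited lines; the dimensions and the three-way chunk are an assumption inferred from the shift_msa/scale_msa names, not code quoted by the log:

    import torch
    import torch.nn as nn

    class AdaNormSingleSketch(nn.Module):
        def __init__(self, dim):
            super().__init__()
            self.silu = nn.SiLU()
            self.linear = nn.Linear(dim, 3 * dim)   # wrapped by a peft lora.Linear in this log
            self.norm = nn.LayerNorm(dim, elementwise_affine=False, eps=1e-6)

        def forward(self, x, emb):
            emb = self.linear(self.silu(emb))                   # normalization.py:169
            shift_msa, scale_msa, gate_msa = emb.chunk(3, dim=1)
            x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # normalization.py:171
            return x, gate_msa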
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.linear, 140533116695168) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[9].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.linear.lora_A, 140533116697712) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.linear.lora_A['default_0'], 140533116686720) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.linear.lora_A['default_0'].weight, 140526268537792) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.linear.lora_B, 140533116685040) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.linear.lora_B['default_0'], 140533116690320) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.linear.base_layer, 140581770193280) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.linear.lora_dropout, 140533116689072) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.linear.lora_dropout['default_0'], 140533116689168) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[9].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[9].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[9].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[9].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[9].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[9].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[9].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].act_mlp, 140581770193424) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
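With the norm and act_mlp subtrees closed, the remaining guards walk proj_mlp and proj_out, the other LoRA-wrapped linears of this block. The source comments cited so far (transformer_flux.py:88, 89, 98) outline the single-block forward; the sketch below assembles them into one function. The attention call, the concatenation, and the residual add are paraphrased assumptions included only to make the sketch self-contained, not lines quoted by the log:

    import torch

    def single_block_forward(self, hidden_states, temb, image_rotary_emb=None):
        residual = hidden_states
        norm_hidden_states, gate = self.norm(hidden_states, emb=temb)        # transformer_flux.py:88
        mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states))  # transformer_flux.py:89
        attn_output = self.attn(hidden_states=norm_hidden_states,
                                image_rotary_emb=image_rotary_emb)
        hidden_states = torch.cat([attn_output, mlp_hidden_states], dim=2)
        hidden_states = gate.unsqueeze(1) * self.proj_out(hidden_states)     # transformer_flux.py:98
        return residual + hidden_states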
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_mlp, 140533116688160) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[9].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_mlp.lora_A, 140533116697616) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_mlp.lora_A['default_0'], 140533112559920) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_mlp.lora_A['default_0'].weight, 140526268534352) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_mlp.lora_B, 140533116698528) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_mlp.lora_B['default_0'], 140533112569712) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_mlp.base_layer, 140581770193376) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_mlp.lora_dropout, 140533116692144) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_mlp.lora_dropout['default_0'], 140533116696992) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[9].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[9].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[9].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[9].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[9].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[9].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[9].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out, 140533112554880) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[9].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out.lora_A, 140533112568512) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out.lora_A['default_0'], 140533112567744) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out.lora_A['default_0'].weight, 140526268533552) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out.lora_B, 140533112558000) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out.lora_B['default_0'], 140533112559488) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out.base_layer, 140581770193472) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out.lora_dropout, 140533112566976) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out.lora_dropout['default_0'], 140533112568272) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[9].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[9].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[9].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[9].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[9].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[9].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[9].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[9].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[9].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[9]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10], accessed_by=GetItemGuardAccessor(10) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10], 140581770193136) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[10].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
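The peft code paths quoted in the guard comments above (peft/tuners/lora/layer.py:557-568, together with the merged / disable_adapters / active_adapter properties at peft/tuners/tuners_utils.py:506-516) fit together as the forward pattern sketched below. This is a minimal sketch assembled from the quoted fragments, assuming the plain non-DoRA path and a single adapter named 'default_0'; it condenses peft's real class hierarchy into one hypothetical module and is not the verbatim peft implementation.

    import torch.nn as nn

    class LoRALinearSketch(nn.Module):
        # Hypothetical condensation of a peft LoRA-wrapped Linear, shaped to
        # match the guarded state above: ModuleDicts keyed by 'default_0',
        # scaling == 1.0, use_dora False, no merged adapters.
        def __init__(self, base: nn.Linear, r: int = 16, scale: float = 1.0):
            super().__init__()
            self.base_layer = base
            self.lora_A = nn.ModuleDict({"default_0": nn.Linear(base.in_features, r, bias=False)})
            self.lora_B = nn.ModuleDict({"default_0": nn.Linear(r, base.out_features, bias=False)})
            self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})  # zero dropout
            self.scaling = {"default_0": scale}
            self.use_dora = {"default_0": False}
            self.active_adapters = ["default_0"]

        def forward(self, x, *args, **kwargs):
            # peft first runs the frozen base layer ...            # layer.py:557
            result = self.base_layer(x, *args, **kwargs)
            for active_adapter in self.active_adapters:
                if active_adapter not in self.lora_A.keys():       # layer.py:560
                    continue
                lora_A = self.lora_A[active_adapter]               # layer.py:562
                lora_B = self.lora_B[active_adapter]               # layer.py:563
                dropout = self.lora_dropout[active_adapter]        # layer.py:564
                scaling = self.scaling[active_adapter]             # layer.py:565
                x = x.to(lora_A.weight.dtype)                      # layer.py:566
                if not self.use_dora[active_adapter]:              # layer.py:568
                    # ... then adds the scaled low-rank update.
                    result = result + lora_B(lora_A(dropout(x))) * scaling
            return result

Every dict lookup and attribute read in this loop is what the DICT_LENGTH, EQUALS_MATCH and ID_MATCH guards above pin down, one wrapped projection at a time.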
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10], accessed_by=GetItemGuardAccessor(10)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10], 140581770193136) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[10].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn, 140581770194336) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[10].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_k, 140533112511776) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[10].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_k.lora_A, 140533112511920) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_k.lora_A['default_0'], 140533112507888) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_k.lora_A['default_0'].weight, 140537328368784) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_k.lora_B, 140533112515568) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_k.lora_B['default_0'], 140533112514464) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_k.base_layer, 140581770194480) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_k.lora_dropout, 140533112512160) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_k.lora_dropout['default_0'], 140533112510384) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[10].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[10].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[10].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[10].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[10].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[10].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[10].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[10].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[10].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
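Each leaf above compiles to a small Python predicate over the same source expressions. Assuming the usual semantics of dynamo's guard helpers (___check_obj_id and ___check_type_id compare ids captured at trace time), the checks reduce to roughly the sketch below; this illustrates the semantics and is not dynamo's actual guard code, and the function names are chosen here for clarity.

    # Illustrative equivalents of the guard kinds in this dump (assumed semantics):
    def id_match(obj, expected_id: int) -> bool:
        # ID_MATCH: the very same Python object as when the graph was traced.
        return id(obj) == expected_id

    def type_match(obj, expected_type_id: int) -> bool:
        # TYPE_MATCH: the same concrete type object (here: dict).
        return id(type(obj)) == expected_type_id

    def lora_value_guards(proj) -> bool:
        # The value-level guards recorded above for one wrapped projection
        # such as attn.to_k (proj stands for a peft LoRA layer):
        return (
            len(proj.scaling) == 1                  # DICT_LENGTH
            and proj.scaling["default_0"] == 1.0    # EQUALS_MATCH
            and len(proj.use_dora) == 1             # DICT_LENGTH
            and not proj.merged_adapters            # LENGTH_CHECK
        )

Because ID_MATCH pins exact object identities, down to each submodule's .training flag, swapping any of these LoRA modules for new objects or flipping train/eval mode fails the guard set and forces a recompile; the TENSOR_ALIASING entries additionally require every layer's _active_adapter to remain the same object as transformer_blocks[0].norm1.linear's.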
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_q, 140533112512256) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[10].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_q.lora_A, 140533112516336) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_q.lora_A['default_0'], 140533112518016) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_q.lora_A['default_0'].weight, 140537328369504) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_q.lora_B, 140533112507264) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_q.lora_B['default_0'], 140533112517632) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_q.base_layer, 140581770194576) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_q.lora_dropout, 140533112514656) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_q.lora_dropout['default_0'], 140533112513648) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[10].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[10].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[10].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[10].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[10].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[10].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[10].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[10].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[10].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_v, 140533112514128) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[10].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_v.lora_A, 140533112513696) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_v.lora_A['default_0'], 140533112514224) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_v.lora_A['default_0'].weight, 140526690840832) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_v.lora_B, 140533112513024) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.lora_B.__dict__,
accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_v.lora_B['default_0'], 140533112509856) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_v.base_layer, 140581770194624) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_v.lora_dropout, 140533112511728) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_v.lora_dropout['default_0'], 140533112505344) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[10].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[10].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # 
peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[10].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[10].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[10].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[10].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[10].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v._disable_adapters, 
accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[10].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[10].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.norm_k, 140581770194528) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[10].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[10].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.norm_k.weight, 140581766061632) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.norm_q, 140581770194384) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[10].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[10].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.norm_q.weight, 140581765892352) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[10].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[10].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.processor, 140581770194288) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm, 140581770193952) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[10].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.norm, 140581770194096) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.silu, 140581770194000) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.linear, 140533112554832) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[10].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.linear.lora_A, 140533112566064) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.linear.lora_A['default_0'], 140533112568176) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.linear.lora_A['default_0'].weight, 140537328359824) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.linear.lora_B, 140533112557712) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[10].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.linear.lora_B['default_0'], 140533112555456) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.linear.base_layer, 140581770194048) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.linear.lora_dropout, 140533112556032) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.linear.lora_dropout['default_0'], 140533112569328) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[10].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[10].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[10].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[10].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[10].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[10].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[10].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[10].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[10].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].act_mlp, 140581770194192) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].act_mlp.__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_mlp, 140533112569616) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[10].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_mlp.lora_A, 140533112564144) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[10].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_mlp.lora_A['default_0'], 140533112516144) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_mlp.lora_A['default_0'].weight, 140537328369584) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_mlp.lora_B, 140533112559152) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_mlp.lora_B['default_0'], 140533112512832) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_mlp.base_layer, 140581770194144) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_mlp.lora_dropout, 140533112561312) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_mlp.lora_dropout['default_0'], 140533112566304) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[10].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[10].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
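
The guard kinds in this dump are cheap per-call predicates that Dynamo evaluates before reusing a compiled graph. As a rough mental model (an illustrative Python sketch, not Dynamo's actual implementation), each kind reduces to an ordinary check:

    # Approximate meaning of the guard kinds above (illustration only):
    def id_match(obj, expected_id):        # ID_MATCH pins the exact object identity
        return id(obj) == expected_id
    def type_match(obj, expected_type_id): # TYPE_MATCH pins the exact type
        return id(type(obj)) == expected_type_id
    def dict_length(d, n):                 # DICT_LENGTH pins the dict size
        return len(d) == n
    def equals_match(value, constant):     # EQUALS_MATCH compares by value,
        return value == constant           # e.g. scaling['default_0'] == 1.0
    def dict_contains(d, key):             # DICT_CONTAINS (negated form above: the
        return key in d                    # instance __dict__ must NOT override 'forward')

The two ids that recur against every `.training` flag (140591004393440 and 140591004393408) are consistent with ID_MATCH against the interned `False`/`True` singletons: booleans are guarded by object identity rather than by value.
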
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[10].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[10].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[10].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[10].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[10].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
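
Every guard in this cluster corresponds to an attribute read inside peft's LoRA Linear.forward, the frames quoted in the trailing comments (peft/tuners/lora/layer.py:557-568). An abridged paraphrase of that path (reconstructed from the peft source of this era, not a verbatim copy) shows what each guard is specializing on:

    # Abridged paraphrase of peft's lora.Linear.forward (not verbatim):
    def forward(self, x, *args, **kwargs):
        # the merged_adapters LENGTH_CHECK and _disable_adapters ID_MATCH above
        # come from the merged/disable_adapters properties in tuners_utils.py
        result = self.base_layer(x, *args, **kwargs)        # line 557: base_layer guards
        for active_adapter in self.active_adapters:         # _active_adapter guards
            if active_adapter not in self.lora_A.keys():    # line 560: lora_A dict guard
                continue
            lora_A = self.lora_A[active_adapter]            # line 562: ID_MATCH lora_A['default_0']
            lora_B = self.lora_B[active_adapter]            # line 563: ID_MATCH lora_B['default_0']
            dropout = self.lora_dropout[active_adapter]     # line 564: lora_dropout guards
            scaling = self.scaling[active_adapter]          # line 565: EQUALS_MATCH == 1.0
            x = x.to(lora_A.weight.dtype)                   # line 566: ID_MATCH on the weight
            if not self.use_dora[active_adapter]:           # line 568: use_dora is False
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Because Dynamo inlines this method for every wrapped projection, the full attribute walk (module __dict__, training flag, the lora_A/lora_B/lora_dropout containers and their per-adapter entries, the scaling and use_dora dicts, and the hook dicts) is re-guarded once per LoRA-wrapped Linear.
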
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[10].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[10].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_out, 140533112516048) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[10].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_out.lora_A, 
accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_out.lora_A, 140533112507216) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_out.lora_A['default_0'], 140533112516480) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_out.lora_A['default_0'].weight, 140537328367424) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[10].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_out.lora_B, 140533112511968) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_out.lora_B['default_0'], 140533112516672) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_out.base_layer, 140581770194240) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[10].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_out.lora_dropout, 140533112508320) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_out.lora_dropout['default_0'], 140533112514896) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] 
[0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[10].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[10].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[10].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[10].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[10].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[10].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | 
| | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[10].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[10].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[10].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[10].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[10]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11], accessed_by=GetItemGuardAccessor(11) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11], 140581770193904) # for index_block, block in 
enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[11].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn, 140581770195152) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[11].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
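
From here the identical bundle restarts for single_transformer_blocks[11].attn: each LoRA-wrapped projection (to_q, to_k, to_v, proj_mlp, proj_out, the AdaLayerNorm linear, and so on) contributes roughly the same twenty-odd guards seen above. With Flux's 19 double-stream and 38 single-stream blocks that multiplies out to guards in the thousands, which is why this dump runs so long and why guard evaluation itself becomes a measurable per-call cost. A back-of-envelope estimate (the per-module counts are assumptions for illustration, not measured):

    # Rough guard-count estimate; per-module counts are illustrative assumptions.
    guards_per_lora_linear = 20          # about what one wrapped projection shows above
    single_blocks, wrapped_per_single = 38, 6
    double_blocks, wrapped_per_double = 19, 12
    total = guards_per_lora_linear * (single_blocks * wrapped_per_single
                                      + double_blocks * wrapped_per_double)
    print(total)  # on the order of 9000 guards for the LoRA wrappers alone
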
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_k, 140533113769936) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[11].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_k.lora_A, 140533116466272) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_k.lora_A['default_0'], 
140533116466464) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_k.lora_A['default_0'].weight, 140526554555120) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_k.lora_B, 140533116459600) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_k.lora_B['default_0'], 140533116466848) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_k.base_layer, 140581770195344) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_k.lora_dropout, 140533116463152) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
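
Note what the ID_MATCH guards on objects like lora_A['default_0'].weight imply: the compiled graph is pinned to those exact Python objects, not to their values. Re-running load_lora_weights, swapping adapters, or otherwise recreating the peft modules produces new objects with new ids, so these guards fail and Dynamo compiles a fresh cache entry; the [0/3] tag on every record here suggests this is already the fourth compiled entry for this frame (entries are numbered from [0/0]). A small illustration, assuming the compiled module is named `transformer` (the id below is taken from this log):

    # Why swapping a LoRA triggers recompilation (illustrative):
    w = transformer.single_transformer_blocks[11].attn.to_k.lora_A['default_0'].weight
    print(id(w))  # the ID_MATCH guard requires this to equal 140526554555120
    # Reloading the adapter creates brand-new Parameter objects, so id(new_w) != id(w):
    # the guard fails and the next call compiles cache entry [0/4].
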
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_k.lora_dropout['default_0'], 140533116466032) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[11].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[11].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[11].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: 
___check_type_id(L['self'].single_transformer_blocks[11].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[11].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[11].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[11].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
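
The pair of TENSOR_ALIASING guards that follows asserts that to_k._active_adapter is literally the same list object as transformer_blocks[0].norm1.linear._active_adapter; despite the name, the check printed is an `a is b` object-aliasing test (peft shares one active-adapter list across layers), and the same line is printed twice for each module in this dump. If this guard volume or the repeated recompiles become a problem, one common mitigation is to fold the adapter into the base weights before compiling, so the peft wrapper modules, and all of the guards they generate, drop out of the traced module. A sketch, assuming a diffusers pipeline named `pipe` with the LoRA already loaded (verify the exact calls against your diffusers/peft versions):

    import torch
    pipe.fuse_lora()                    # merge the LoRA deltas into the base weights
    pipe.unload_lora_weights()          # remove the peft wrapper modules
    pipe.transformer = torch.compile(pipe.transformer)  # guards now cover plain Linears
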
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[11].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[11].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_q, 140533115553440) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[11].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_q.lora_A, 140533115556368) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[11].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_q.lora_A['default_0'], 140533115552288) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_q.lora_A['default_0'].weight, 140526554561440) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_q.lora_B, 140533115555552) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_q.lora_B['default_0'], 140533115564528) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_q.base_layer, 140581770195440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_q.base_layer.training, 140591004393440) # result = 
self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_q.lora_dropout, 140533115554544) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_q.lora_dropout['default_0'], 140533115555024) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[11].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[11].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[11].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[11].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[11].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[11].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[11].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged 
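
The to_k and to_q guard entries above keep citing the same few source lines of the PEFT LoRA dispatch (peft/tuners/lora/layer.py:557-568): ID_MATCH pins the base layer and the lora_A/lora_B/lora_dropout containers, DICT_LENGTH plus EQUALS_MATCH pin scaling == {'default_0': 1.0}, ID_MATCH on the False singleton pins use_dora, and LENGTH_CHECK pins merged_adapters to empty. As a hedged sketch of the control flow those guards specialize (illustrative class and variable names, not the verbatim PEFT source):

    import torch
    import torch.nn as nn

    class TinyLoraLinear(nn.Module):
        # Hypothetical stand-in for peft's lora.Linear, following the branch
        # structure named in the guard comments (layer.py:557-568).
        def __init__(self, base: nn.Linear, r: int = 4, adapter: str = "default_0"):
            super().__init__()
            self.base_layer = base
            self.lora_A = nn.ModuleDict({adapter: nn.Linear(base.in_features, r, bias=False)})
            self.lora_B = nn.ModuleDict({adapter: nn.Linear(r, base.out_features, bias=False)})
            self.lora_dropout = nn.ModuleDict({adapter: nn.Identity()})
            self.scaling = {adapter: 1.0}      # EQUALS_MATCH above pins this value
            self.use_dora = {adapter: False}   # ID_MATCH above pins the False singleton
            self.merged_adapters = []          # LENGTH_CHECK above pins this empty
            self.active_adapters = [adapter]   # TENSOR_ALIASING: one list shared by every layer

        def forward(self, x: torch.Tensor) -> torch.Tensor:
            result = self.base_layer(x)                          # layer.py:557
            for active_adapter in self.active_adapters:
                if active_adapter not in self.lora_A.keys():     # layer.py:560
                    continue
                lora_A = self.lora_A[active_adapter]             # layer.py:562
                lora_B = self.lora_B[active_adapter]             # layer.py:563
                dropout = self.lora_dropout[active_adapter]      # layer.py:564
                scaling = self.scaling[active_adapter]           # layer.py:565
                x = x.to(lora_A.weight.dtype)                    # layer.py:566
                if not self.use_dora[active_adapter]:            # layer.py:568
                    result = result + lora_B(lora_A(dropout(x))) * scaling
            return result

Every attribute touched on this path becomes its own guard, which is why a single LoRA-wrapped Linear contributes a dozen-plus checks to the tree, repeated for each of to_q/to_k/to_v in every block.
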
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[11].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_v, 140533116467856) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[11].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH:
___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_v.lora_A, 140533118542816) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_v.lora_A['default_0'], 140533118546128) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_v.lora_A['default_0'].weight, 140526554553520) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_v.lora_B, 140533118547712) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_v.lora_B['default_0'], 140533118541472) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.base_layer, 
accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_v.base_layer, 140581770195488) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_v.lora_dropout, 140533116466128) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_v.lora_dropout['default_0'], 140533116453936) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | 
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[11].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[11].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[11].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[11].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[11].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v._forward_hooks, 
accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[11].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[11].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[11].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH:
___check_obj_id(L['self'].single_transformer_blocks[11].attn.norm_k, 140581770195392) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[11].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[11].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.norm_k.weight, 140581783350912) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.norm_q, 140581770195200) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[11].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[11].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.norm_q.weight, 140581783354592) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) 
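
The norm_q and norm_k subtrees pin the RMS-norm path cited in their comments (diffusers/src/diffusers/models/normalization.py:428-430): EQUALS_MATCH fixes eps == 1e-06 and ID_MATCH fixes the identity of self.weight (including whether it is None). A minimal sketch of that computation, assuming only what the guard comments show:

    import torch

    def rms_norm(hidden_states: torch.Tensor, weight=None, eps: float = 1e-06) -> torch.Tensor:
        # normalization.py:428 per the log: scale by the reciprocal RMS over the last dim
        variance = hidden_states.pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + eps)
        if weight is not None:  # normalization.py:430: optional learned gain
            hidden_states = hidden_states * weight
        return hidden_states

Because eps is guarded with EQUALS_MATCH and weight with ID_MATCH, only rebuilding the module with a different eps or a new weight object would invalidate this entry; the tensor values themselves are not checked.
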
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[11].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[11].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.processor, 140581770195104) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[11].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm, 140581770194720) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[11].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.norm, 140581770194864) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | 
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.silu, 140581770194768) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.linear, 140533112506112) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[11].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.linear.lora_A, 140533112504432) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.linear.lora_A['default_0'], 140533112506064) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.lora_A['default_0'].weight, 
accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.linear.lora_A['default_0'].weight, 140526690831472) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.linear.lora_B, 140533112515088) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.linear.lora_B['default_0'], 140533112506352) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[11].norm.linear.base_layer, 140581770194816) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.linear.lora_dropout, 140533112513600) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.linear.lora_dropout['default_0'], 140533112505920) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.lora_dropout['default_0'].training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[11].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[11].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[11].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[11].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[11].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | 
| | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[11].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[11].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[11].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) 
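The LoRA-layer guards in this dump keep citing the same few source lines, peft/tuners/lora/layer.py:557-568 (base_layer, lora_A, lora_B, lora_dropout, scaling, use_dora). As a reading aid, here is a minimal sketch of that non-DoRA forward path, reconstructed only from the source lines quoted in the guard comments above; it is not PEFT's actual implementation, and the class name LoraLinearSketch and the single hard-coded adapter "default_0" are illustrative assumptions.

```python
import torch
import torch.nn as nn


class LoraLinearSketch(nn.Module):
    """Illustrative stand-in for a PEFT LoRA Linear wrapper with one adapter."""

    def __init__(self, base_layer: nn.Linear, r: int, adapter: str = "default_0"):
        super().__init__()
        self.base_layer = base_layer
        # One A/B pair per adapter, matching the guarded lora_A / lora_B ModuleDicts.
        self.lora_A = nn.ModuleDict({adapter: nn.Linear(base_layer.in_features, r, bias=False)})
        self.lora_B = nn.ModuleDict({adapter: nn.Linear(r, base_layer.out_features, bias=False)})
        self.lora_dropout = nn.ModuleDict({adapter: nn.Identity()})
        self.scaling = {adapter: 1.0}      # guarded above via EQUALS_MATCH == 1.0
        self.use_dora = {adapter: False}   # guarded above via ID_MATCH (use_dora is False here)
        self.active_adapters = [adapter]

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        result = self.base_layer(x)                          # layer.py:557
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():     # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]             # layer.py:562
            lora_B = self.lora_B[active_adapter]             # layer.py:563
            dropout = self.lora_dropout[active_adapter]      # layer.py:564
            scaling = self.scaling[active_adapter]           # layer.py:565
            x = x.to(lora_A.weight.dtype)                    # layer.py:566
            if not self.use_dora[active_adapter]:            # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result
```

One consequence visible in the dump: scaling['default_0'] is pinned by EQUALS_MATCH to 1.0 and the lora_A / lora_B submodules by ID_MATCH on their object ids, so changing the adapter's scaling between compiled calls, or replacing the adapter weights with new module objects, fails these guards and forces a recompilation.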
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].act_mlp, 140581770195008) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_mlp, 140533112504576) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[11].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # 
diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_mlp.lora_A, 140533112506448) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_mlp.lora_A['default_0'], 140533115562704) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.lora_A['default_0'].weight, 
accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_mlp.lora_A['default_0'].weight, 140526690832352) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_mlp.lora_B, 140533112505296) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_mlp.lora_B['default_0'], 140533115564288) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_mlp.base_layer, 140581770194912) # result = 
self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_mlp.lora_dropout, 140533112506016) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_mlp.lora_dropout['default_0'], 140533112504816) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[11].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[11].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[11].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[11].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[11].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[11].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[11].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[11].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[11].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[11].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_out, 140533115564768) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[11].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_out.lora_A, 140533115556704) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_out.lora_A['default_0'], 140533115561744) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_out.lora_A['default_0'].weight, 140526554546880) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_out.lora_B, 140533115564960) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_out.lora_B['default_0'], 140533115562224) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_out.base_layer, 140581770195056) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_out.lora_dropout, 140533115555408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_out.lora_dropout['default_0'], 140533115554160) # dropout = 
self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[11].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[11].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[11].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[11].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[11].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_out.use_dora['default_0'], 
140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[11].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[11].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[11].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[11].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[11]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[11]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12], accessed_by=GetItemGuardAccessor(12) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12], 140581770194672) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[12].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn, 140581770195968) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- 
DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[12].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_k, 140533118184048) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[12].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_k.lora_A, 140533118175744) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_k.lora_A['default_0'], 140533118174256) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_k.lora_A['default_0'].weight, 140526787100512) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_k.lora_B, 140533118181840) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_k.lora_B['default_0'], 140533118180640) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_k.base_layer, 140581770196112) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_k.lora_dropout, 140533118185872) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_k.lora_dropout['default_0'], 140533118179104) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[12].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[12].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[12].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[12].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[12].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[12].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[12].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[12].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
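
For reference, the code path this to_k subtree pins down (peft/tuners/lora/layer.py:557-568, per the guard annotations) looks roughly like the following. This is a minimal single-adapter sketch reconstructed from the annotations above, not a copy of peft's actual Linear class; the class name, rank argument and the nn.Identity dropout are illustrative assumptions. Every attribute the guards touch appears here, which is why a single LoRA-wrapped linear produces so many guard nodes.

    # Sketch of the guarded LoRA forward path (assumptions flagged above).
    import torch
    import torch.nn as nn

    class LoraLinearSketch(nn.Module):
        def __init__(self, base_layer: nn.Linear, r: int = 16):
            super().__init__()
            self.base_layer = base_layer          # ID_MATCH on the module object
            self.lora_A = nn.ModuleDict(
                {"default_0": nn.Linear(base_layer.in_features, r, bias=False)})
            self.lora_B = nn.ModuleDict(
                {"default_0": nn.Linear(r, base_layer.out_features, bias=False)})
            self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})
            self.scaling = {"default_0": 1.0}     # EQUALS_MATCH on the float value
            self.use_dora = {"default_0": False}  # ID_MATCH on the bool singleton
            self.merged_adapters = []             # LENGTH_CHECK: must stay empty
            self._disable_adapters = False        # ID_MATCH on the bool singleton
            self._active_adapter = ["default_0"]  # one list, aliased by every layer

        def forward(self, x):
            result = self.base_layer(x)                        # layer.py:557
            for active_adapter in self._active_adapter:
                if active_adapter not in self.lora_A.keys():   # layer.py:560
                    continue
                lora_A = self.lora_A[active_adapter]           # layer.py:562
                lora_B = self.lora_B[active_adapter]           # layer.py:563
                dropout = self.lora_dropout[active_adapter]    # layer.py:564
                scaling = self.scaling[active_adapter]         # layer.py:565
                x = x.to(lora_A.weight.dtype)                  # layer.py:566
                if not self.use_dora[active_adapter]:          # layer.py:568
                    result = result + lora_B(lora_A(dropout(x))) * scaling
            return result

    # e.g.: y = LoraLinearSketch(nn.Linear(3072, 3072))(torch.randn(2, 3072))

The to_q and to_v subtrees that follow guard the same attribute set with their own object ids.
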
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_q, 140533119741200) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[12].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_q.lora_A, 140533119736256) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_q.lora_A['default_0'], 140533118188272) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_q.lora_A['default_0'].weight, 140533138353312) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_q.lora_B, 140533119732656) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_q.lora_B['default_0'], 140533118181072) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_q.base_layer, 140581770196208) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_q.lora_dropout, 140533119736592) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_q.lora_dropout['default_0'], 140533119740144) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[12].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[12].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[12].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[12].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[12].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[12].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[12].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[12].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
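
Read mechanically, every guard kind in this dump reduces to a cheap predicate over the module tree. The sketch below gives approximate pure-Python equivalents as a reading aid; the real checks are C++ guard-manager nodes installed by torch/_dynamo/guards.py, so treat this as semantics, not implementation. Two object ids recur on every boolean check (`.training`, `use_dora['default_0']`, `_disable_adapters`): 140591004393440 and 140591004393408 appear to be the CPython False and True singletons, which is why plain booleans can be guarded by ID_MATCH at all.

    # Approximate semantics of the guard predicates above (assumption: simplified).
    def id_match(obj, expected_id):          # ID_MATCH / ___check_obj_id
        return id(obj) == expected_id

    def type_match(obj, expected_type_id):   # TYPE_MATCH / ___check_type_id
        return id(type(obj)) == expected_type_id

    def equals_match(obj, value):            # EQUALS_MATCH
        return obj == value

    def dict_length(d, n):                   # DICT_LENGTH
        return len(d) == n

    def length_check_falsy(seq):             # LENGTH_CHECK: not <seq>
        return not seq

    def dict_contains(d, key):               # DICT_CONTAINS (negated in this log:
        return key in d                      # no per-instance 'forward' override)

    def tensor_aliasing(a, b):               # TENSOR_ALIASING
        return a is b                        # here: the shared _active_adapter list
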
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_v, 140533115555744) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[12].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_v.lora_A, 140533118095792) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_v.lora_A['default_0'], 140533118096464) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_v.lora_A['default_0'].weight, 140526787087712) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_v.lora_B, 140533118107168) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_v.lora_B['default_0'], 140533118102752) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_v.base_layer, 140581770196256) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_v.lora_dropout, 140533118093440) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_v.lora_dropout['default_0'], 140533118099632) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[12].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[12].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[12].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[12].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[12].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[12].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[12].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[12].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
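
One practical consequence of the EQUALS_MATCH `scaling['default_0'] == 1.0` guards repeated across the to_q/to_k/to_v subtrees above: as I understand the lora_scale plumbing, passing a non-default LoRA scale makes diffusers rescale the adapters around the forward pass, which mutates `scaling['default_0']` while the guards run and forces a recompile. A sketch of how to surface this (the model id, adapter path, prompt and step count are placeholders, and `joint_attention_kwargs={"scale": ...}` is the usual Flux route for a lora_scale):

    import torch
    from diffusers import FluxPipeline

    torch._logging.set_logs(recompiles=True)   # log which guard failed on recompile

    pipe = FluxPipeline.from_pretrained(
        "black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16
    ).to("cuda")
    pipe.load_lora_weights("path/to/lora")     # placeholder adapter
    pipe.transformer = torch.compile(pipe.transformer)

    _ = pipe("a prompt", num_inference_steps=4)      # first call compiles, guards recorded
    _ = pipe("a prompt", num_inference_steps=4,
             joint_attention_kwargs={"scale": 0.5})  # scaling != 1.0 -> guard fails -> recompile

Conversely, fusing the adapter into the base weights before compiling (for example with the pipeline's fuse_lora API, if hot-swapping the adapter is not needed) removes the per-layer lora_A/lora_B/scaling structure and the guards it generates.
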
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.norm_k, 140581770196160) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[12].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[12].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.norm_k.weight, 140581783352912) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.norm_q, 140581770196016) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[12].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[12].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.norm_q.weight, 140581783352432) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[12].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[12].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.processor, 140581770195920) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
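
The EQUALS_MATCH on `attn.heads == 24` specializes the compiled graph to this head count, and the annotation points at the head split in attention_processor.py:1721. As a worked check of that arithmetic (the inner_dim value is an assumption about this checkpoint, inferred from the guarded head count and the usual FLUX head size, not read from the log):

    # Head split pinned by the guard above (attention_processor.py:1721).
    inner_dim = 3072              # assumption: 24 heads x 128-dim heads for FLUX
    heads = 24                    # guarded: EQUALS_MATCH ... == 24
    head_dim = inner_dim // heads
    assert head_dim == 128
    # query/key/value are then viewed as (batch, -1, heads, head_dim) and transposed
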
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.forward, accessed_by=FuncDefaultsGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm, accessed_by=DictGetItemGuardAccessor(norm)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm, 140581770195584) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[12].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.norm, 140581770195728) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.silu, 140581770195632) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
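The `norm` submodule guarded here is the AdaLayerNormZero-style modulation that produces `norm_hidden_states, gate` at transformer_flux.py:88, built from the `silu`/`norm`/`linear` children whose identities are checked above. Below is a hedged sketch stitched from the quoted normalization.py:169/171 lines; the 3-way chunk is an assumption inferred from the `shift_msa`/`scale_msa` names, and in this particular run the inner `linear` is a peft LoRA wrapper (hence the `lora_A`/`lora_B` guard subtree that follows) rather than the plain `nn.Linear` used in the sketch:

```python
import torch
from torch import nn

class AdaLayerNormZeroSingleSketch(nn.Module):
    def __init__(self, embedding_dim: int):
        super().__init__()
        self.silu = nn.SiLU()
        self.linear = nn.Linear(embedding_dim, 3 * embedding_dim)  # LoRA-wrapped in this trace
        self.norm = nn.LayerNorm(embedding_dim, elementwise_affine=False, eps=1e-6)

    def forward(self, x: torch.Tensor, emb: torch.Tensor):
        emb = self.linear(self.silu(emb))                     # normalization.py:169
        shift_msa, scale_msa, gate_msa = emb.chunk(3, dim=1)  # assumed intermediate step
        x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # normalization.py:171
        return x, gate_msa
```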
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.linear, 140533118545168) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[12].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.linear.lora_A, 140533118543920) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.linear.lora_A['default_0'], 140533118541664) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.linear.lora_A['default_0'].weight, 140533111949408) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.linear.lora_B, 140533118543344) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[12].norm.linear.lora_B['default_0'], 140533118541904) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.linear.base_layer, 140581770195680) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.linear.lora_dropout, 140533118538112) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.linear.lora_dropout['default_0'], 140533118542144) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[12].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[12].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[12].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- 
TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[12].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[12].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[12].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[12].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
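Every `lora_A`/`lora_B`/`lora_dropout`/`scaling`/`use_dora`/`merged_adapters` check in this subtree corresponds to one line of peft's LoRA `Linear.forward` (layer.py:557-568, quoted in the guard comments). Here is a hedged sketch of that branch, simplified to the configuration these guards actually pin: a single `'default_0'` adapter, `scaling == 1.0`, `use_dora == False`, nothing merged, adapters enabled:

```python
def lora_linear_forward(self, x, *args, **kwargs):
    # Sketch of the guarded peft code path, not the verbatim implementation.
    result = self.base_layer(x, *args, **kwargs)      # layer.py:557
    for active_adapter in self.active_adapters:
        if active_adapter not in self.lora_A.keys():  # layer.py:560
            continue
        lora_A = self.lora_A[active_adapter]          # layer.py:562
        lora_B = self.lora_B[active_adapter]          # layer.py:563
        dropout = self.lora_dropout[active_adapter]   # layer.py:564
        scaling = self.scaling[active_adapter]        # layer.py:565
        x = x.to(lora_A.weight.dtype)                 # layer.py:566
        if not self.use_dora[active_adapter]:         # layer.py:568
            result = result + lora_B(lora_A(dropout(x))) * scaling
    return result
```

Each dict lookup in this path surfaces above as its own TYPE_MATCH/DICT_LENGTH/EQUALS_MATCH/ID_MATCH node, which is why a single LoRA-wrapped Linear contributes on the order of two dozen guards, and the same subtree repeats for every wrapped layer in the model.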
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[12].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[12].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].act_mlp, 140581770195824) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
V0909 14:45:31.053000
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_mlp, 140533118533696) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[12].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_mlp.lora_A, 140533118548624) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_mlp.lora_A['default_0'], 140533118538544) # lora_A = 
self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_mlp.lora_A['default_0'].weight, 140526560570768) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_mlp.lora_B, 140533118539648) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_mlp.lora_B['default_0'], 140533118547856) # 
lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_mlp.base_layer, 140581770195776) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_mlp.lora_dropout, 140533118549584) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_mlp.lora_dropout.training, 
140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_mlp.lora_dropout['default_0'], 140533118547424) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[12].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[12].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[12].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[12].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: 
len(L['self'].single_transformer_blocks[12].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[12].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[12].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].single_transformer_blocks[12].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[12].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_out, 140533118542480) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[12].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_out.lora_A, 140533125695184) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | 
| | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_out.lora_A['default_0'], 140533119736544) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_out.lora_A['default_0'].weight, 140537216172496) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_out.lora_B, 140533119734336) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | 
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_out.lora_B['default_0'], 140533119735728) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_out.base_layer, 140581770195872) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_out.lora_dropout, 140533118539792) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_out.lora_dropout['default_0'], 140533118537440) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[12].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[12].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[12].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # 
peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[12].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[12].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[12].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[12].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[12].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 
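
The merged_adapters, _disable_adapters and _active_adapter guards trace three small PEFT properties that the log quotes verbatim; a sketch reconstructed from those citations:

    # Sketch of the PEFT BaseTunerLayer properties cited in this dump.
    class BaseTunerLayerSketch:
        def __init__(self):
            self.merged_adapters = []        # LENGTH_CHECK asserts this stays empty
            self._disable_adapters = False   # ID_MATCH on the False singleton
            self._active_adapter = "default_0"

        @property
        def merged(self):                    # peft/tuners/tuners_utils.py:506
            return bool(self.merged_adapters)

        @property
        def disable_adapters(self):          # peft/tuners/tuners_utils.py:511
            return self._disable_adapters

        @property
        def active_adapter(self):            # peft/tuners/tuners_utils.py:516
            return self._active_adapter

The LENGTH_CHECK on merged_adapters pins the "no adapter merged yet" state and the ID_MATCH on _disable_adapters pins adapters as enabled; merging or disabling an adapter after compilation therefore invalidates the cached graph.
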
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[12].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[12].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[12]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13], accessed_by=GetItemGuardAccessor(13) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13], 140581770195536) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[13].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: 
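
The paired TENSOR_ALIASING entries (each one appears to be printed twice, once per aliased source) record that Dynamo found the very same Python object behind every layer's _active_adapter, anchored at transformer_blocks[0].norm1.linear. An identity relation like that lets a single value guard stand in for hundreds of per-layer checks. A minimal sketch of the relation being asserted, with hypothetical shared state:

    # What the `a is b` aliasing guard means here: every LoRA layer references
    # the same active-adapter object, so guarding it once covers all of them.
    shared_active = "default_0"                  # hypothetical shared object
    norm1_linear_active_adapter = shared_active
    proj_out_active_adapter = shared_active
    assert norm1_linear_active_adapter is proj_out_active_adapter
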
___check_obj_id(L['self'].single_transformer_blocks[13].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn, 140581770196736) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[13].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_k, 140533118035872) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[13].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- 
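
The recurring `DICT_CONTAINS: not ___dict_contains('forward', ...)` guards all point at nn.Module._call_impl, which the log quotes: forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward). Dynamo is pinning the fact that no instance attribute shadows the class's forward method; patching one in afterwards would change dispatch and must trigger a recompile:

    import torch.nn as nn

    m = nn.Linear(4, 4)
    assert "forward" not in m.__dict__   # the state the guard asserts
    m.forward = lambda x: x              # an instance-level patch...
    assert "forward" in m.__dict__       # ...would now fail that guard
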
GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_k.lora_A, 140533118038464) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_k.lora_A['default_0'], 140533118035968) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_k.lora_A['default_0'].weight, 140526676850416) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_k.lora_B, 140533118031696) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_k.lora_B['default_0'], 140533118038416) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = 
self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_k.base_layer, 140581770196880) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_k.lora_dropout, 140533118038224) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_k.lora_dropout['default_0'], 140533118036304) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | 
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[13].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[13].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[13].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[13].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[13].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # 
peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[13].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[13].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[13].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[13].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- 
GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_q, 140533118035728) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[13].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_q.lora_A, 140533118035008) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | 
| | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_q.lora_A['default_0'], 140533118036016) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_q.lora_A['default_0'].weight, 140526676838416) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_q.lora_B, 140533118040480) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_q.lora_B['default_0'], 140533118036400) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_q.base_layer, 140581770196976) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_q.lora_dropout, 140533118036352) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[13].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_q.lora_dropout['default_0'], 140533118036496) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[13].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[13].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[13].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[13].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[13].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[13].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[13].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q._backward_pre_hooks, 
accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[13].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[13].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_v, 140533118035152) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[13].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_v.lora_A, 140533118036208) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 
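
The attn.to_q / to_k / to_v subtrees follow the Flux attention processor's projection calls cited in the log (diffusers/src/diffusers/models/attention_processor.py:1716-1718), with the optional key norm checked at :1729. A paraphrase of just those cited steps; reshaping, RoPE and the attention computation itself are omitted, and the norm application body is assumed rather than quoted:

    def flux_qkv(attn, hidden_states):
        # Paraphrase of attention_processor.py:1716-1729 as cited above.
        query = attn.to_q(hidden_states)   # :1716
        key = attn.to_k(hidden_states)     # :1717
        value = attn.to_v(hidden_states)   # :1718
        if attn.norm_k is not None:        # :1729
            key = attn.norm_k(key)         # body assumed, not in the log
        return query, key, value

Because each projection here is itself a LoRA-wrapped Linear, the full lora_A / lora_B / scaling guard pattern repeats under every one of to_q, to_k and to_v.
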
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_v.lora_A['default_0'], 140533118026464) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_v.lora_A['default_0'].weight, 140526676839616) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].attn.to_v.lora_B, 140533118032128) # lora_B = 
self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
[TREE_GUARD_MANAGER dump continues (V0909 14:45:31.053000, torch/_dynamo/guards.py:2148, [0/3] [__guards]) with the guard subtree for L['self'].single_transformer_blocks[13]; condensed below, with the identical per-record log prefixes and tree pipes omitted.]
  attn.to_v (PEFT LoRA layer): ID_MATCH guards on lora_B, lora_B['default_0'], and their .training flags (# lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward); ID_MATCH on base_layer and base_layer.training (# result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward); ID_MATCH on lora_dropout, lora_dropout['default_0'], and their .training flags (# dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward); TYPE_MATCH and DICT_LENGTH == 1 on scaling with EQUALS_MATCH scaling['default_0'] == 1.0 (# scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward); TYPE_MATCH, DICT_LENGTH == 1, and ID_MATCH on use_dora['default_0'] (# if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward); GuardManagers on _forward_hooks, _backward_hooks, _forward_pre_hooks, and _backward_pre_hooks; TYPE_MATCH and LENGTH_CHECK: not merged_adapters (# return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged); ID_MATCH on _disable_adapters (# return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters); TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[13].attn.to_v._active_adapter, emitted twice (# return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter).
  attn.norm_k and attn.norm_q: ID_MATCH on the module and its .training flag (# if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__; # if attn.norm_q is not None: # attention_processor.py:1727 in __call__); DICT_CONTAINS: not ___dict_contains('forward', __dict__) (# forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl); EQUALS_MATCH eps == 1e-06 (# hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward); ID_MATCH on weight (# if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward); GuardManagers on the four hook dicts.
  attn: EQUALS_MATCH heads == 24 (# head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__); TYPE_MATCH and ID_MATCH on processor (# attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # attention_processor.py:479 in forward; # return self.processor( # attention_processor.py:490 in forward); GuardManagers on the four hook dicts; GuardManager on forward via GetAttrGuardAccessor(forward) and FuncDefaultsGuardAccessor, with ID_MATCH on forward.__defaults__[0] (# batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # attention_processor.py:1713 in __call__).
  norm: ID_MATCH on the module and its .training flag, plus DICT_CONTAINS: not ___dict_contains('forward', __dict__) (# norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward); ID_MATCH on norm.norm and norm.norm.training (# x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward); ID_MATCH on norm.silu and norm.silu.training (# emb = self.linear(self.silu(emb)) # normalization.py:169 in forward); norm.linear carries the same PEFT LoRA guard set as attn.to_v, sourced at peft/tuners/lora/layer.py:557-568, additionally with ID_MATCH on lora_A, lora_A.training, lora_A['default_0'], lora_A['default_0'].training, and lora_A['default_0'].weight (# x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward) and the doubled TENSOR_ALIASING of _active_adapter against L['self'].transformer_blocks[0].norm1.linear._active_adapter; GuardManagers on norm's four hook dicts.
  act_mlp: ID_MATCH on the module and its .training flag (# mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward).
  proj_mlp (PEFT LoRA layer): DICT_CONTAINS: not ___dict_contains('forward', __dict__); ID_MATCH on the module and its .training flag (transformer_flux.py:89 in forward); the full LoRA guard set as above, including ID_MATCH on lora_A['default_0'].weight (peft/tuners/lora/layer.py:566), EQUALS_MATCH scaling['default_0'] == 1.0, ID_MATCH on use_dora['default_0'], LENGTH_CHECK: not merged_adapters, ID_MATCH on _disable_adapters, hook-dict GuardManagers, and the doubled TENSOR_ALIASING of _active_adapter against L['self'].transformer_blocks[0].norm1.linear._active_adapter.
  proj_out (PEFT LoRA layer): ID_MATCH on the module, DICT_CONTAINS: not ___dict_contains('forward', __dict__), and ID_MATCH on .training (# hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward); GuardManager on _modules; ID_MATCH on lora_A, lora_A.training, lora_A['default_0'], and lora_A['default_0'].training (peft/tuners/lora/layer.py:560-562 in forward), continuing with:
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.lora_A['default_0'].weight,
accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_out.lora_A['default_0'].weight, 140533118516288) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_out.lora_B, 140533118063600) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_out.lora_B['default_0'], 140533118033616) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_out.base_layer, 140581770196640) # result = 
self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_out.lora_dropout, 140533118069120) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_out.lora_dropout['default_0'], 140533118062448) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[13].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[13].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[13].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[13].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[13].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[13].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[13].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[13].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[13].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[13].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[13].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[13].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[13]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[13]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14], accessed_by=GetItemGuardAccessor(14) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14], 140581770196304) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[14].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn, 140581770197504) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[14].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.training, 140591004393440) # attn_output = self.attn( # 
diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_k, 140533117924304) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[14].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_k.lora_A, 140533117923872) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in 
self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_k.lora_A['default_0'], 140533117917920) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_k.lora_A['default_0'].weight, 140526770826000) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_k.lora_B, 140533117925408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_k.lora_B['default_0'], 140533117924448) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_k.base_layer, 140581770197648) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_k.lora_dropout, 140533117914176) # dropout = self.lora_dropout[active_adapter] # 
peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_k.lora_dropout['default_0'], 140533117915760) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[14].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[14].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | 
+- EQUALS_MATCH: L['self'].single_transformer_blocks[14].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[14].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[14].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[14].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[14].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[14].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[14].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_q, 140533117917872) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[14].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[14].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_q.lora_A, 140533117921760) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_q.lora_A['default_0'], 140533117922720) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_q.lora_A['default_0'].weight, 140526686019936) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_q.lora_B, 140533117915808) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_q.lora_B['default_0'], 140533117924064) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_q.base_layer, 140581770197744) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.base_layer.__dict__, 
accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_q.lora_dropout, 140533117921472) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_q.lora_dropout['default_0'], 140533117915232) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[14].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[14].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[14].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[14].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[14].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[14].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[14].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[14].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[14].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
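
Every guard in the to_q subtree above maps onto one attribute read in peft's LoRA Linear.forward; the guard comments quote the exact source lines. The fragment below is a rough sketch of that path, reconstructed from those quoted lines (peft/tuners/lora/layer.py:557-568), not the verbatim peft code:

    def lora_linear_forward(self, x, *args, **kwargs):
        # layer.py:557 -- guarded via ID_MATCH on base_layer and its .training flag
        result = self.base_layer(x, *args, **kwargs)
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():   # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]           # layer.py:562
            lora_B = self.lora_B[active_adapter]           # layer.py:563
            dropout = self.lora_dropout[active_adapter]    # layer.py:564
            scaling = self.scaling[active_adapter]         # layer.py:565 -- EQUALS_MATCH pins it to 1.0
            x = x.to(lora_A.weight.dtype)                  # layer.py:566 -- ID_MATCH on the weight object
            if not self.use_dora[active_adapter]:          # layer.py:568 -- plain-LoRA branch
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Note that scaling['default_0'] is guarded with EQUALS_MATCH == 1.0 rather than being traced as an input, so changing the LoRA scale after compilation invalidates the guard set and forces a recompile.
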
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_v, 140533117912928) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[14].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_v.lora_A, 140533117920560) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_v.lora_A['default_0'], 140533117909552) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_v.lora_A['default_0'].weight, 140526770828080) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_v.lora_B, 140533117921136) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_v.lora_B['default_0'], 140533117902880) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_v.base_layer, 140581770197792) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_v.lora_dropout, 140533117925360) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_v.lora_dropout['default_0'], 140533117921088) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[14].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[14].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[14].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[14].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[14].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[14].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[14].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[14].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[14].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
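
Two recurring patterns in these subtrees are worth decoding. First, the two object ids that keep appearing in ID_MATCH guards on .training, use_dora['default_0'] and _disable_adapters (140591004393440 and 140591004393408) are identity checks against the CPython True/False singletons; the split seen here (base layers and norms on one id, the peft-injected lora_A/lora_B/lora_dropout modules on the other) suggests the injected submodules were left in training mode while the base model is in eval mode, though that is an inference from the log, not something it states. Second, the TENSOR_ALIASING pair on _active_adapter records that every LoRA layer shares a single _active_adapter object with transformer_blocks[0].norm1.linear. Approximately, in illustrative Python rather than the actual C++ guard implementations:

    def check_obj_id(obj, expected_addr):      # ID_MATCH / ___check_obj_id
        return id(obj) == expected_addr        # pins singletons such as True/False

    def check_type_id(obj, expected_addr):     # TYPE_MATCH / ___check_type_id
        return id(type(obj)) == expected_addr  # e.g. dict for .scaling / .use_dora

    def check_aliasing(a, b):                  # TENSOR_ALIASING on _active_adapter
        return a is b                          # one shared object across all LoRA layers
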
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.norm_k, 140581770197696) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[14].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[14].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.norm_k.weight, 140581772772272) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.norm_q, 140581770197552) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[14].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[14].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.norm_q.weight, 140581771722112) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[14].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[14].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.processor, 140581770197456) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
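
The norm_q/norm_k guards pin the query/key RMSNorm modules. Note the asymmetry: eps is value-checked (EQUALS_MATCH == 1e-06) because the constant is baked into the compiled kernel, while weight only needs an ID_MATCH on the parameter object. A minimal sketch of the forward these guards protect, pieced together from the quoted normalization.py:428/430 lines (the float32 variance computation is an assumption based on the usual diffusers RMSNorm, not quoted in this log):

    import torch

    def rms_norm(hidden_states, weight=None, eps=1e-6):
        variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + eps)   # normalization.py:428
        if weight is not None:                                        # normalization.py:430
            hidden_states = hidden_states.to(weight.dtype) * weight
        return hidden_states

Similarly, the EQUALS_MATCH on attn.heads == 24 feeds head_dim = inner_dim // attn.heads; for the standard Flux config with 3072-wide blocks that works out to head_dim = 128 (the 3072 figure comes from the published Flux configuration, not from this log).
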
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.forward, accessed_by=FuncDefaultsGuardAccessor
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm, 140581770197120) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[14].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.norm, 140581770197264) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.silu, 140581770197168) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.linear, 140533118030592) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[14].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.linear.lora_A, 140533118032560) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.linear.lora_A['default_0'], 140533118030928) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.linear.lora_A['default_0'].weight, 140526686022176) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.linear.lora_B, 140533118032992) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.linear.lora_B['default_0'], 140533118031936) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.linear.base_layer, 140581770197216) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.linear.lora_dropout, 140533118030544) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.linear.lora_dropout['default_0'], 140533118029728) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[14].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[14].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[14].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[14].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[14].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[14].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[14].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[14].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[14].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
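
The norm subtree just above is the block's AdaLayerNormZeroSingle: temb passes through silu and then a linear (itself LoRA-wrapped, hence the nested lora_A/lora_B guards on norm.linear), and the result modulates a LayerNorm. A compact sketch assembled from the quoted source lines (normalization.py:169/171, transformer_flux.py:88); the chunk(3) split and the LayerNorm settings are assumptions based on the standard diffusers module, not spelled out in this log:

    import torch.nn as nn

    class AdaLayerNormZeroSingleSketch(nn.Module):
        def __init__(self, embedding_dim):
            super().__init__()
            self.silu = nn.SiLU()
            self.linear = nn.Linear(embedding_dim, 3 * embedding_dim)
            self.norm = nn.LayerNorm(embedding_dim, elementwise_affine=False, eps=1e-6)

        def forward(self, x, emb):
            emb = self.linear(self.silu(emb))                                 # normalization.py:169
            shift_msa, scale_msa, gate_msa = emb.chunk(3, dim=1)
            x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # normalization.py:171
            return x, gate_msa                                                # transformer_flux.py:88
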
forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_mlp.lora_A, 140533118027616) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_mlp.lora_A['default_0'], 140533118029104) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[14].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_mlp.lora_A['default_0'].weight, 140526686017856) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_mlp.lora_B, 140533118036112) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_mlp.lora_B['default_0'], 140533118029536) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_mlp.base_layer, 140581770197312) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_mlp.lora_dropout, 140533118034864) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_mlp.lora_dropout['default_0'], 140533118030256) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[14].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[14].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[14].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[14].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[14].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 
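
The run of guards above walks every attribute that PEFT's LoRA Linear.forward touches for proj_mlp: base_layer, lora_A, lora_B, lora_dropout, scaling, and use_dora, each pinned at the source line quoted in the trailing comment. A minimal sketch of that forward path for the non-merged, non-DoRA case these guards assume (paraphrased from the peft/tuners/lora/layer.py line references in the dump, not the verbatim PEFT source):

    def lora_linear_forward(layer, x):
        result = layer.base_layer(x)                       # layer.py:557
        for active_adapter in layer.active_adapters:
            if active_adapter not in layer.lora_A.keys():  # layer.py:560
                continue
            lora_A = layer.lora_A[active_adapter]          # layer.py:562
            lora_B = layer.lora_B[active_adapter]          # layer.py:563
            dropout = layer.lora_dropout[active_adapter]   # layer.py:564
            scaling = layer.scaling[active_adapter]        # layer.py:565 (EQUALS_MATCH == 1.0 above)
            x = x.to(lora_A.weight.dtype)                  # layer.py:566
            if not layer.use_dora[active_adapter]:         # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Every attribute read in this body appears above as its own GuardManager, which is why a single LoRA-wrapped Linear contributes a dozen-plus guards to the tree.
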
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[14].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[14].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[14].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[14].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:45:31.053000 
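
Despite the name, the TENSOR_ALIASING entries just above (the dump prints the check twice) are object-identity checks: every LoRA layer in the model holds a reference to one and the same _active_adapter object, and the compiled graph assumes that aliasing never changes. A minimal illustration of the invariant being re-verified, assuming _active_adapter is a plain list like ['default_0'] (FakeLoraLayer is an illustrative stand-in, not a PEFT class):

    shared_active_adapter = ["default_0"]

    class FakeLoraLayer:
        def __init__(self):
            # every layer points at the same list object, as in the dump
            self._active_adapter = shared_active_adapter

    a, b = FakeLoraLayer(), FakeLoraLayer()
    assert a._active_adapter is b._active_adapter  # what TENSOR_ALIASING re-checks

Rebinding the attribute on any one layer (rather than mutating the shared list in place) would break the `is` relation and invalidate the compiled graph.
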
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_out, 140533118032752) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[14].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_out.lora_A, 140533118033712) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_out.lora_A['default_0'], 140533117921184) # lora_A = self.lora_A[active_adapter] # 
peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_out.lora_A['default_0'].weight, 140526686017296) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_out.lora_B, 140533118035488) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_out.lora_B['default_0'], 140533117915568) # lora_B = 
self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_out.base_layer, 140581770197408) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_out.lora_dropout, 140533118041728) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_out.lora_dropout.training, 140591004393408) # 
dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_out.lora_dropout['default_0'], 140533118026752) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[14].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[14].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[14].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[14].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: 
len(L['self'].single_transformer_blocks[14].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[14].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[14].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[14].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].single_transformer_blocks[14].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[14].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[14]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15], accessed_by=GetItemGuardAccessor(15) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15], 140581770197072) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[15].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:45:31.053000 140590996850496 
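
Two guard kinds dominate this tree. ID_MATCH (___check_obj_id) pins an attribute to the exact Python object it referenced at trace time, and the negated DICT_CONTAINS on 'forward' asserts that no per-instance forward override has been installed, mirroring the self.forward lookup in nn.Module._call_impl cited in the comments. Roughly, as a simplification rather than the actual implementation in torch/_dynamo/guards.py:

    def check_obj_id(obj, expected_id):
        # ID_MATCH: identical object identity as at trace time. For the
        # `training` flags this is effectively `obj is True` / `obj is False`,
        # so toggling train/eval on any guarded module forces a recompile.
        return id(obj) == expected_id

    def check_no_instance_forward(module):
        # DICT_CONTAINS (negated): an instance-level `forward` monkey-patch
        # would bypass the traced class-level forward, so it must be absent.
        return "forward" not in module.__dict__

Both checks are cheap individually; the cost here comes from how many of them one LoRA-patched transformer accumulates.
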
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn, 140581770198272) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[15].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_k, 140533125423664) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[15].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_k.lora_A, 140533125430336) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_k.lora_A['default_0'], 140533126421168) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_k.lora_A['default_0'].weight, 140526693202768) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_k.lora_B, 140533125417472) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_k.lora_B['default_0'], 140533126419440) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_k.base_layer, 140581770198416) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_k.lora_dropout, 140533125422128) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_k.lora_dropout['default_0'], 140533125426016) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[15].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[15].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[15].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[15].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[15].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 
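
The EQUALS_MATCH entries above bake scaling['default_0'] == 1.0 into this compilation, alongside TYPE_MATCH and DICT_LENGTH guards on the scaling dict itself. Any later rescaling of the adapter, e.g. a lora_scale other than 1.0 or a set_adapters call with new weights, mutates self.scaling, fails the guard, and costs a fresh compile. A hypothetical probe (pipe, transformer, and inputs are placeholders; set_adapters is the public diffusers API):

    import torch

    compiled = torch.compile(transformer)      # traces with scaling == 1.0 guards
    _ = compiled(**inputs)                     # first call: compile
    pipe.set_adapters(["default_0"], adapter_weights=[0.7])  # rescales self.scaling
    _ = compiled(**inputs)                     # EQUALS_MATCH fails -> recompile

Each distinct scaling value that fails an EQUALS_MATCH lands in its own cache entry, bounded by torch._dynamo.config.cache_size_limit.
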
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[15].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[15].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[15].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[15].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_q, 140533155259888) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[15].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[15].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_q.lora_A, 140533125430192) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_q.lora_A['default_0'], 140533125427648) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_q.lora_A['default_0'].weight, 140526693197728) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_q.lora_B, 140533125428560) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_q.lora_B['default_0'], 140533125424048) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.lora_B['default_0'].__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_q.base_layer, 140581770198512) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_q.lora_dropout, 140533125422080) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | 
| +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_q.lora_dropout['default_0'], 140533125427552) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[15].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[15].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[15].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[15].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[15].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 
in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[15].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[15].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].single_transformer_blocks[15].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[15].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_v, 140533126419872) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[15].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_v.lora_A, 140533126415456) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
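
The per-projection pattern that just repeated for to_k and to_q (and continues below for to_v) mirrors, branch for branch, the PEFT forward quoted in the guard comments: every Python dict the forward reads (lora_A, lora_B, lora_dropout, scaling, use_dora) gets an ID_MATCH or TYPE_MATCH plus a length check, the merged/disable_adapters properties from tuners_utils.py are pinned, and _active_adapter is aliased to transformer_blocks[0].norm1.linear._active_adapter via the TENSOR_ALIASING guards. A sketch reconstructed from the source lines the guards quote (peft/tuners/lora/layer.py:557-568); a paraphrase, not the verbatim library code:

    import torch

    def lora_linear_forward(self, x: torch.Tensor, *args, **kwargs) -> torch.Tensor:
        result = self.base_layer(x, *args, **kwargs)        # :557, base_layer pinned by ID_MATCH
        for active_adapter in self.active_adapters:         # backed by the aliased _active_adapter
            if active_adapter not in self.lora_A.keys():    # :560
                continue
            lora_A = self.lora_A[active_adapter]            # :562
            lora_B = self.lora_B[active_adapter]            # :563
            dropout = self.lora_dropout[active_adapter]     # :564
            scaling = self.scaling[active_adapter]          # :565, EQUALS_MATCH == 1.0 above
            x = x.to(lora_A.weight.dtype)                   # :566, weight pinned by object id
            if not self.use_dora[active_adapter]:           # :568, use_dora['default_0'] is False here
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Because all of these are plain Python objects rather than tensors, Dynamo specializes on them: swapping the active adapter, changing the LoRA scale, or toggling use_dora fails the guards and forces a recompile.
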
[__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_v.lora_A['default_0'], 140533126464128) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_v.lora_A['default_0'].weight, 140526693190368) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_v.lora_B, 140533126423424) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.lora_B.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_v.lora_B['default_0'], 140533126479392) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_v.base_layer, 140581770198560) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | 
+- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_v.lora_dropout, 140533126427792) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_v.lora_dropout['default_0'], 140533126421552) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[15].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[15].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.scaling['default_0'], 
accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[15].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[15].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[15].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[15].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[15].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[15].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[15].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[15].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.norm_k, 140581770198464) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[15].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_k.eps, 
accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[15].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.norm_k.weight, 140581772775472) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.norm_q, 140581770198320) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[15].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_q.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[15].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.norm_q.weight, 140581772744304) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[15].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:45:31.053000 
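
The norm_q/norm_k managers just above pin eps to 1e-06 via EQUALS_MATCH and the affine weight by object identity; the normalization.py lines they quote (:428, :430) outline an RMS-style norm. A sketch assembled from those quoted lines, with the variance computation filled in as a standard-RMSNorm assumption:

    import torch

    def rms_norm_forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        # Assumed: mean of squares over the last dim, as in a standard RMSNorm.
        variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + self.eps)  # :428, eps guarded == 1e-06
        if self.weight is not None:                                       # :430, weight ID-matched
            hidden_states = hidden_states * self.weight                   # affine step (assumed form)
        return hidden_states
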
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[15].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.processor, 140581770198224) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm, 140581770197888) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[15].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.norm, 140581770198032) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.silu, 140581770197936) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 
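
The block-level .norm guards around this point walk an AdaLayerNormZero-style module: transformer_flux.py:88 shows the call site (norm_hidden_states, gate = self.norm(hidden_states, emb=temb)), and normalization.py:169/:171 supply its two quoted statements. A sketch of that forward; the three-way chunk is an assumption inferred from the shift_msa/scale_msa names and the returned gate:

    import torch

    def ada_norm_zero_single_forward(self, x: torch.Tensor, emb: torch.Tensor):
        emb = self.linear(self.silu(emb))                                  # :169
        shift_msa, scale_msa, gate_msa = emb.chunk(3, dim=1)               # assumed split
        x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]   # :171
        return x, gate_msa                                                 # matches the :88 call site

Note that norm.linear is itself a PEFT-wrapped Linear, so the full lora_A/lora_B/scaling guard pattern repeats under it below.
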
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.linear, 140533117903504) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[15].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.linear.lora_A, 140533117896208) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in 
self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.linear.lora_A['default_0'], 140533117946032) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.linear.lora_A['default_0'].weight, 140526770825600) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.linear.lora_B, 140533117944592) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.linear.lora_B['default_0'], 140533117946128) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.linear.base_layer, 140581770197984) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.linear.lora_dropout, 140533117902160) # dropout = 
self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.linear.lora_dropout['default_0'], 140533117896016) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[15].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[15].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[15].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[15].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[15].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[15].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[15].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].norm.linear._disable_adapters, 140591004393440) # return 
self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[15].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[15].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].act_mlp, 140581770198128) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].act_mlp.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_mlp, 140533117943968) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[15].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_mlp.lora_A, 140533117956688) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[15].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_mlp.lora_A['default_0'], 140533117960096) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_mlp.lora_A['default_0'].weight, 140537659313584) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_mlp.lora_B, 140533117958752) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | 
| +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_mlp.lora_B['default_0'], 140533117951264) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_mlp.base_layer, 140581770198080) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_mlp.lora_dropout, 140533117948768) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_mlp.lora_dropout['default_0'], 140533117946656) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[15].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[15].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[15].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in 
forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[15].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[15].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[15].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[15].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[15].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[15].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_out, 140533117949440) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[15].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_out.lora_A, 140533117949488) # if active_adapter not 
in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_out.lora_A['default_0'], 140533126304992) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_out.lora_A['default_0'].weight, 140537659312624) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_out.lora_B, 140533117946320) # 
lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_out.lora_B['default_0'], 140533126307968) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_out.base_layer, 140581770198176) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_out.base_layer.training, 140591004393440) # result = 
self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_out.lora_dropout, 140533117947376) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_out.lora_dropout['default_0'], 140533117958320) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[15].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: 
len(L['self'].single_transformer_blocks[15].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[15].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[15].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[15].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[15].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[15].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[15].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[15].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[15].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[15].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[15]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16], accessed_by=GetItemGuardAccessor(16) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16], 140581770197840) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[16].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn, 140581770772544) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[16].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_k, 140533125614656) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ 
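
A note on the pattern above: every LoRA-wrapped Linear in this trace (norm.linear, proj_mlp, and proj_out of single_transformer_blocks[15], then attn.to_k of block [16] below) receives the same bundle of guards, and each entry's trailing comment names the peft source line being specialized. The following is a minimal sketch of that guarded hot path, reconstructed only from the lines quoted in the guard comments (peft/tuners/lora/layer.py:557-568); the loop header and the final update line are not quoted in this log and are assumptions based on PEFT's usual LoRA Linear.forward:

    def forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)       # layer.py:557 -> ID_MATCH on base_layer
        for active_adapter in self.active_adapters:        # assumed; reads _active_adapter -> TENSOR_ALIASING guards
            if active_adapter not in self.lora_A.keys():   # layer.py:560 -> ID_MATCH on the lora_A ModuleDict
                continue
            lora_A = self.lora_A[active_adapter]           # layer.py:562 -> ID_MATCH on lora_A['default_0']
            lora_B = self.lora_B[active_adapter]           # layer.py:563 -> ID_MATCH on lora_B['default_0']
            dropout = self.lora_dropout[active_adapter]    # layer.py:564 -> ID_MATCH on the dropout module
            scaling = self.scaling[active_adapter]         # layer.py:565 -> EQUALS_MATCH: scaling == 1.0
            x = x.to(lora_A.weight.dtype)                  # layer.py:566 -> ID_MATCH on the lora_A weight object
            if not self.use_dora[active_adapter]:          # layer.py:568 -> ID_MATCH on the use_dora bool
                result = result + lora_B(lora_A(dropout(x))) * scaling  # assumed standard LoRA update
        return result

A few practical consequences follow from how these values are guarded. scaling['default_0'] is pinned by EQUALS_MATCH to the constant 1.0 rather than treated as a symbolic input, so changing the adapter scale after compilation (for example via set_adapters with different adapter_weights, or a different lora_scale at call time) would be expected to fail that guard and trigger a recompile; merging the adapter would likewise flip the LENGTH_CHECK on merged_adapters, and toggling train/eval flips the ID_MATCH guards on the .training bools. The DICT_CONTAINS entries assert only that no instance-level 'forward' attribute shadows the class method (nn/modules/module.py:1556). A dump like this one can be regenerated with TORCH_LOGS="guards,recompiles" in the environment, or torch._logging.set_logs(guards=True, recompiles=True) in code.
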
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[16].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_k.lora_A, 140533125612112) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_k.lora_A['default_0'], 140533125623200) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[16].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_k.lora_A['default_0'].weight, 140526677744576) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_k.lora_B, 140533125612976) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_k.lora_B['default_0'], 140533125613264) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_k.base_layer, 140581770772688) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_k.lora_dropout, 140533125623920) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_k.lora_dropout.training, 140591004393408) # 
dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_k.lora_dropout['default_0'], 140533125627760) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[16].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[16].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[16].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[16].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] 
[0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[16].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[16].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[16].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[16].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[16].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_q, 140533126892848) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[16].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_q.lora_A, 140533126905232) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[16].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_q.lora_A['default_0'], 140533125612784) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_q.lora_A['default_0'].weight, 140526677748976) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_q.lora_B, 140533126900336) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_q.lora_B['default_0'], 140533125626416) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_q.base_layer, 140581770772784) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[16].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_q.lora_dropout, 140533126904464) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_q.lora_dropout['default_0'], 140533126902496) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[16].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[16].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # 
peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[16].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[16].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[16].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[16].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[16].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q._disable_adapters, 
accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[16].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[16].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_v, 140533173221840) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[16].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_v.lora_A, 140533126789104) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_v.lora_A['default_0'], 140533126785264) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.lora_A['default_0'].weight, 
accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_v.lora_A['default_0'].weight, 140526677745296) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_v.lora_B, 140533126775376) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_v.lora_B['default_0'], 140533126786896) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_v.base_layer, 140581770772832) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_v.lora_dropout, 140533129211680) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_v.lora_dropout['default_0'], 140533129203616) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.lora_dropout['default_0'].training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[16].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[16].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[16].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[16].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[16].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[16].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[16].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[16].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[16].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[16].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.norm_k, 140581770772736) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in 
__call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[16].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[16].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.norm_k.weight, 140581772777712) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_k._backward_pre_hooks, 
accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.norm_q, 140581770772592) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[16].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[16].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.norm_q.weight, 140581772780512) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[16].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[16].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[16].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.processor, 140581770198992) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm, 140581770198656) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[16].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.norm, 140581770198800) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.norm.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.silu, 140581770198704) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.linear, 140533126469984) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[16].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[16].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.linear.lora_A, 140533126997584) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.linear.lora_A['default_0'], 140533127057312) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] 
[0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.linear.lora_A['default_0'].weight, 140531262099504) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.linear.lora_B, 140533126990768) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.linear.lora_B['default_0'], 140533127068544) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.linear.base_layer, 140581770198752) # result = 
self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.linear.lora_dropout, 140533126990336) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.linear.lora_dropout['default_0'], 140533126465424) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[16].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[16].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[16].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[16].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[16].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
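Everything from the lora_A guards down to the use_dora check above maps one-to-one onto the branchy PEFT Linear.forward that the log keeps citing (peft/tuners/lora/layer.py:557-568). A sketch of that control flow, paraphrased and abbreviated from the quoted lines (not the full PEFT source; non-DoRA path only):

def lora_linear_forward(self, x, *args, **kwargs):
    result = self.base_layer(x, *args, **kwargs)       # base_layer: ID_MATCH
    for active_adapter in self.active_adapters:        # _active_adapter: TENSOR_ALIASING across modules
        if active_adapter not in self.lora_A.keys():   # lora_A ModuleDict: ID_MATCH
            continue
        lora_A = self.lora_A[active_adapter]           # lora_A['default_0']: ID_MATCH
        lora_B = self.lora_B[active_adapter]           # lora_B['default_0']: ID_MATCH
        dropout = self.lora_dropout[active_adapter]    # lora_dropout['default_0']: ID_MATCH
        scaling = self.scaling[active_adapter]         # scaling dict: TYPE_MATCH + DICT_LENGTH + EQUALS_MATCH == 1.0
        x = x.to(lora_A.weight.dtype)                  # lora_A weight: ID_MATCH
        if not self.use_dora[active_adapter]:          # use_dora['default_0']: ID_MATCH (False here)
            result = result + lora_B(lora_A(dropout(x))) * scaling
    return result

Because scaling and use_dora are plain Python values read during tracing, Dynamo burns them into the compiled graph and guards on them; in PEFT, scaling is typically lora_alpha / r, so the EQUALS_MATCH == 1.0 is consistent with lora_alpha == r for this adapter. Every LoRA-wrapped Linear in the model repeats this cluster, which is the other big multiplier on the guard count.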
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[16].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[16].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[16].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[16].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | |
| | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].act_mlp, 140581770198896) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_mlp, 140533127053520) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[16].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_mlp.lora_A, 140533127058464) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_mlp.lora_A['default_0'], 140533125486800) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_mlp.lora_A['default_0'].weight, 140531262108544) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_mlp.lora_B, 140533125485408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_mlp.lora_B['default_0'], 140533125482336) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_mlp.base_layer, 140581770198848) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_mlp.lora_dropout, 140533127067392) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_mlp.lora_dropout['default_0'], 140533127063360) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # 
peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[16].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[16].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[16].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[16].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[16].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[16].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[16].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[16].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[16].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_out, 140533125494576) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[16].proj_out.__dict__) # forward_call = 
(self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_out.lora_A, 140533125481088) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_out.lora_A['default_0'], 140533126896256) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[16].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_out.lora_A['default_0'].weight, 140531262099664) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_out.lora_B, 140533125490256) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_out.lora_B['default_0'], 140533126897360) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_out.base_layer, 140581770198944) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_out.lora_dropout, 140533125485168) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_out.lora_dropout['default_0'], 140533125494432) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[16].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[16].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[16].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[16].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[16].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[16].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[16].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[16].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[16].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[16].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[16]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17], accessed_by=GetItemGuardAccessor(17)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17], 140581770198608) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[17].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn, accessed_by=DictGetItemGuardAccessor(attn)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn, 140581770773312) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[17].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_k, 140533213662368) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[17].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_k.lora_A, 140533213816880) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_k.lora_A['default_0'], 140533215136496) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_k.lora_A['default_0'].weight, 140526662616304) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_k.lora_B, 140533213814912) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_k.lora_B['default_0'], 140533214789392) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_k.base_layer, 140581770773456) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_k.lora_dropout, 140533213660880) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_k.lora_dropout['default_0'], 140533213672880) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[17].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[17].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[17].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[17].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[17].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[17].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[17].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[17].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[17].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_q, 140533212354816) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[17].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_q.lora_A, 140533212357168) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_q.lora_A['default_0'], 140533213992880) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_q.lora_A['default_0'].weight, 140526662629584) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_q.lora_B, 140533214172768) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_q.lora_B['default_0'], 140533213999792) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_q.base_layer, 140581770773552) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_q.lora_dropout, 140533212362160) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_q.lora_dropout['default_0'], 140533212354192) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[17].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[17].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[17].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[17].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[17].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[17].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[17].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[17].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[17].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_v, 140533173123824) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[17].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_v.lora_A, 140533214361680) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_v.lora_A['default_0'], 140533214560160) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_v.lora_A['default_0'].weight, 140526662044784) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_v.lora_B, 140533214352176) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_v.lora_B['default_0'], 140533214556608) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_v.base_layer, 140581770773600) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_v.lora_dropout, 140533214350064) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_v.lora_dropout['default_0'], 140533214349344) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[17].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[17].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[17].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[17].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[17].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[17].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[17].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.to_v._active_adapter,
accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[17].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[17].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.norm_k, 140581770773504) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[17].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[17].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.norm_k.weight, 140581772714416) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.norm_q, 140581770773360) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[17].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[17].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # 
diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.norm_q.weight, 140581765134464) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[17].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[17].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.processor, 140581770773264) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn._forward_hooks, 
accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm, 140581770772928) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[17].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # 
diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.norm, 140581770773072) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.silu, 140581770772976) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.linear, 140533171334416) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in 
forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[17].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.linear.lora_A, 140533170878352) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.linear.lora_A['default_0'], 140533170612944) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[17].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.linear.lora_A['default_0'].weight, 140526681737952) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.linear.lora_B, 140533171240144) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.linear.lora_B['default_0'], 140533170610784) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.linear.base_layer, 140581770773024) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.linear.lora_dropout, 140533169426144) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[17].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.linear.lora_dropout['default_0'], 140533169426240) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[17].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[17].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[17].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[17].norm.linear.use_dora, 140591004466944) # if not 
self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[17].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[17].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[17].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[17].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[17].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[17].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].act_mlp, 140581770773168) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[17].proj_mlp, 140533173123152) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[17].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].proj_mlp.lora_A, 140533211641888) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].proj_mlp.lora_A['default_0'], 140533172005344) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].proj_mlp.lora_A['default_0'].weight, 140526681748192) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].proj_mlp.lora_B, 140533171996992) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].proj_mlp.lora_B['default_0'], 140533172003616) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
[... remaining guards on L['self'].single_transformer_blocks[17].proj_mlp, the same per-layer LoRA pattern as every block above: ID_MATCH on base_layer, lora_dropout and lora_dropout['default_0'] plus their .training flags; TYPE_MATCH and DICT_LENGTH == 1 on scaling with EQUALS_MATCH scaling['default_0'] == 1.0; TYPE_MATCH and DICT_LENGTH == 1 on use_dora with ID_MATCH on use_dora['default_0']; GuardManagers for _forward_hooks, _backward_hooks, _forward_pre_hooks, _backward_pre_hooks; TYPE_MATCH and LENGTH_CHECK: not merged_adapters (peft/tuners/tuners_utils.py:506 in merged); ID_MATCH on _disable_adapters (tuners_utils.py:511 in disable_adapters); and a doubled TENSOR_ALIASING of _active_adapter against L['self'].transformer_blocks[0].norm1.linear._active_adapter (tuners_utils.py:516 in active_adapter) ...]
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[17].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[17].proj_out, 140533212584864) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
[... identical LoRA guard subtree for proj_out: DICT_CONTAINS: not ___dict_contains('forward', ...), training; ID_MATCH on lora_A (140533211512784), lora_A['default_0'] (140533212906256) and lora_A['default_0'].weight (140526681743632, # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566), lora_B (140533212445040), lora_B['default_0'] (140533212918832), base_layer (140581770773216), lora_dropout (140533211505056) and lora_dropout['default_0'] (140533211501984); EQUALS_MATCH scaling['default_0'] == 1.0; use_dora, hook, merged_adapters, _disable_adapters and _active_adapter TENSOR_ALIASING guards as above; then the empty-hook GuardManagers for single_transformer_blocks[17] itself ...]
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18], accessed_by=GetItemGuardAccessor(18)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18], 140581770772880) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
[... DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[18].__dict__), training ID_MATCH and _modules GuardManager, as for every block ...]
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn, accessed_by=DictGetItemGuardAccessor(attn)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn, 140581770774080) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.to_k, 140533127358512) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
[... full per-layer LoRA subtree for to_k: lora_A (140533127358464), lora_A['default_0'] (140533127364128) and its .weight (140526675030832), lora_B (140533127361632), lora_B['default_0'] (140533127363552), base_layer (140581770774224), lora_dropout (140533127348672), lora_dropout['default_0'] (140533127363456), scaling['default_0'] == 1.0, use_dora, hooks, merged_adapters, _disable_adapters, doubled _active_adapter TENSOR_ALIASING ...]
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.to_q, 140533131364288) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
[... full per-layer LoRA subtree for to_q: lora_A (140533131368560), lora_A['default_0'] (140533127358992) and its .weight (140526662041824), lora_B (140533127355008), lora_B['default_0'] (140533127357216), base_layer (140581770774320), lora_dropout (140533131364720), lora_dropout['default_0'] (140533131363952), scaling['default_0'] == 1.0, use_dora, hooks, merged_adapters, _disable_adapters, doubled _active_adapter TENSOR_ALIASING ...]
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.to_v, 140533127362784) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.to_v.lora_A, 140533127362928) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
[... the to_v LoRA subtree and the rest of the guard dump continue in the same per-layer pattern ...]
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.to_v.lora_A['default_0'], 140533219441632) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.to_v.lora_A['default_0'].weight, 140526675031632) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.to_v.lora_B, 140533127350496) # lora_B = 
self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.to_v.lora_B['default_0'], 140533217964576) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.to_v.base_layer, 140581770774368) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[18].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.to_v.lora_dropout, 140533127355392) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.to_v.lora_dropout['default_0'], 140533127362832) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[18].attn.to_v.scaling, 140591004466944) # scaling = 
self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[18].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[18].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[18].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[18].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[18].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not 
L['self'].single_transformer_blocks[18].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[18].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[18].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.norm_k, 140581770774272) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[18].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_k.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[18].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.norm_k.weight, 140581783349232) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.norm_q, 140581770774128) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_q.__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[18].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[18].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.norm_q.weight, 140581772716976) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[18].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[18].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[18].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.processor, 140581770774032) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm, 
accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm, 140581770773696) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[18].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.norm, 140581770773840) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[18].norm.silu, 140581770773744) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.linear, 140533214787232) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[18].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.linear.lora_A, 140533216521680) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] 
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.linear.lora_A['default_0'], 140533218176128) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.linear.lora_A['default_0'].weight, 140526662051984) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.linear.lora_B, 140533216630512) # lora_B = 
self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.linear.lora_B['default_0'], 140533218302304) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.linear.base_layer, 140581770773792) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[18].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.linear.lora_dropout, 140533216522496) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.linear.lora_dropout['default_0'], 140533216517408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[18].norm.linear.scaling, 
140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[18].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[18].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[18].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[18].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[18].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- 
LENGTH_CHECK: not L['self'].single_transformer_blocks[18].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[18].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[18].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[18].act_mlp, 140581770773936) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_mlp, 140533218300576) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[18].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_mlp.lora_A, 140533218309696) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in 
forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_mlp.lora_A['default_0'], 140533217740016) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_mlp.lora_A['default_0'].weight, 140526662050304) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_mlp.lora_B, 140533218300096) # lora_B = self.lora_B[active_adapter] # 
peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_mlp.lora_B['default_0'], 140533217745296) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_mlp.base_layer, 140581770773888) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # 
peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_mlp.lora_dropout, 140533218311616) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_mlp.lora_dropout['default_0'], 140533218309792) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[18].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: 
len(L['self'].single_transformer_blocks[18].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[18].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[18].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[18].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[18].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[18].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[18].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[18].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[18].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_out, 140533130134480) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[18].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # 
diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_out.lora_A, 140533130135440) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_out.lora_A['default_0'], 140533131365200) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.lora_A['default_0'].weight, 
accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_out.lora_A['default_0'].weight, 140526662051424) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_out.lora_B, 140533130134912) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_out.lora_B['default_0'], 140533131362560) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_out.base_layer, 140581770773984) # result = 
self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_out.lora_dropout, 140533130135536) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_out.lora_dropout['default_0'], 140533130135920) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[18].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[18].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[18].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[18].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[18].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[18].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[18].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[18].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[18].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[18].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[18].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[18].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[18]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[18]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19], accessed_by=GetItemGuardAccessor(19) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19], 140581770773648) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[19].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn, 140581770774848) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[19].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.training, 140591004393440) # attn_output = self.attn( # 
diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_k, 140533219607792) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[19].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_k.lora_A, 140533219611728) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in 
self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_k.lora_A['default_0'], 140533219607264) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_k.lora_A['default_0'].weight, 140526771683472) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_k.lora_B, 140533219613216) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_k.lora_B['default_0'], 140533219607504) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_k.base_layer, 140581770774992) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_k.lora_dropout, 140533219611968) # dropout = self.lora_dropout[active_adapter] # 
peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_k.lora_dropout['default_0'], 140533219608944) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[19].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[19].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | 
+- EQUALS_MATCH: L['self'].single_transformer_blocks[19].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[19].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[19].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[19].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[19].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[19].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[19].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_q, 140533219616384) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[19].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: 
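Despite the name, the two TENSOR_ALIASING entries above are object-identity guards: every PEFT-wrapped layer in the model shares one _active_adapter object, so Dynamo asserts `a is b` against transformer_blocks[0].norm1.linear instead of re-validating the value per layer. A toy illustration of the invariant being guarded (stub class, hypothetical names):

    shared_active_adapter = ["default_0"]

    class LayerStub:
        def __init__(self):
            # The same list object is attached to every layer, as in PEFT.
            self._active_adapter = shared_active_adapter

    a, b = LayerStub(), LayerStub()
    assert a._active_adapter is b._active_adapter      # what the guard checks
    b._active_adapter = ["default_0"]                  # equal value, new object
    assert a._active_adapter is not b._active_adapter  # guard would now fail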
source=L['self'].single_transformer_blocks[19].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_q.lora_A, 140533219616288) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_q.lora_A['default_0'], 140533219613840) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_q.lora_A['default_0'].weight, 140526771695072) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_q.lora_B, 140533219617632) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_q.lora_B['default_0'], 140533219611008) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_q.base_layer, 140581770775088) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.base_layer.__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_q.lora_dropout, 140533219618592) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_q.lora_dropout['default_0'], 140533219616096) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[19].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[19].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[19].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[19].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[19].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[19].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[19].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[19].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[19].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_v, 140533219611872) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', 
L['self'].single_transformer_blocks[19].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_v.lora_A, 140533219618112) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_v.lora_A['default_0'], 140533218605952) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_v.lora_A['default_0'].weight, 140526653933184) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_v.lora_B, 140533219622528) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_v.lora_B['default_0'], 140533218601056) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.lora_B['default_0'].training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_v.base_layer, 140581770775136) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_v.lora_dropout, 140533219609088) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | 
+- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_v.lora_dropout['default_0'], 140533219614608) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[19].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[19].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[19].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[19].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[19].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.use_dora['default_0'], 
accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[19].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[19].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[19].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: 
L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[19].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.norm_k, 140581770775040) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[19].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[19].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.norm_k.weight, 140581772719056) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.norm_q, 140581770774896) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[19].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[19].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 
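The EQUALS_MATCH guards on norm_k.eps and norm_q.eps (== 1e-06) come from the RMSNorm forward cited at normalization.py:428-430; eps is a Python float attribute, so it too is specialized into the graph. Roughly what that forward computes (a sketch, not the exact diffusers source; dtype handling differs slightly across versions):

    import torch
    from typing import Optional

    def rms_norm_sketch(hidden_states: torch.Tensor,
                        weight: Optional[torch.Tensor],
                        eps: float = 1e-6) -> torch.Tensor:
        variance = hidden_states.float().pow(2).mean(-1, keepdim=True)
        # normalization.py:428 -- the float eps lands in the graph as a constant
        hidden_states = hidden_states * torch.rsqrt(variance + eps)
        if weight is not None:  # normalization.py:430, hence the weight ID_MATCH
            hidden_states = hidden_states.to(weight.dtype) * weight
        return hidden_states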
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.norm_q.weight, 140581765885248) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[19].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[19].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.processor, 140581770774800) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 
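attn.heads is specialized the same way (EQUALS_MATCH ... == 24) because the processor derives head_dim from it in plain Python at attention_processor.py:1721. Schematically, the reshape that integer feeds:

    import torch

    def split_heads(x: torch.Tensor, heads: int = 24) -> torch.Tensor:
        # head_dim = inner_dim // attn.heads, then
        # (batch, seq, heads * head_dim) -> (batch, heads, seq, head_dim)
        batch_size, seq_len, inner_dim = x.shape
        head_dim = inner_dim // heads
        return x.view(batch_size, seq_len, heads, head_dim).transpose(1, 2)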
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm, 140581770774464) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[19].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | 
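The FuncDefaultsGuardAccessor entry is more unusual: it guards the default value of an optional parameter of attn.forward, because the traced code branches on encoder_hidden_states being None (attention_processor.py:1713) and the call site relies on the default. A toy version of the guarded pattern:

    import torch

    class M(torch.nn.Module):
        def forward(self, hidden_states, encoder_hidden_states=None):
            # The None default feeds control flow, so Dynamo pins
            # forward.__defaults__[0] (an ID_MATCH on the None singleton).
            if encoder_hidden_states is None:
                encoder_hidden_states = hidden_states
            return hidden_states + encoder_hidden_states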
| | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.norm, 140581770774608) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.silu, 140581770774512) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.linear, 140533217964480) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[19].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.linear.lora_A, 140533217951952) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.linear.lora_A['default_0'], 140533219257184) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = 
self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.linear.lora_A['default_0'].weight, 140526771694272) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.linear.lora_B, 140533217954928) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.linear.lora_B['default_0'], 140533219246624) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[19].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.linear.base_layer, 140581770774560) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.linear.lora_dropout, 140533217966976) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.linear.lora_dropout['default_0'], 140533217958048) # dropout = 
self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[19].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[19].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[19].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[19].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[19].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | 
| | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[19].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[19].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[19].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].single_transformer_blocks[19].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].act_mlp, 140581770774704) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_mlp, 140533217499104) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[19].proj_mlp.__dict__) # 
forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_mlp.lora_A, 140533217506448) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_mlp.lora_A['default_0'], 140533218968288) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[19].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_mlp.lora_A['default_0'].weight, 140526771698512) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_mlp.lora_B, 140533218971264) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_mlp.lora_B['default_0'], 140533218811408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_mlp.base_layer, 140581770774656) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_mlp.lora_dropout, 140533217500448) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_mlp.lora_dropout['default_0'], 140533217495984) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[19].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[19].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[19].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[19].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[19].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[19].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[19].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[19].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[19].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:45:31.053000 
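
[editor's note] Nearly every guard in this subtree cites peft/tuners/lora/layer.py:557-568, and read together the quoted source lines reconstruct the branch the compiled graph specialized on. Below is a hedged paraphrase of that forward path assembled from those quoted lines, simplified to the single 'default_0' adapter the guards describe; it is a sketch, not the verbatim peft implementation (self.active_adapters, base_layer, etc. come from peft's BaseTunerLayer):

    def lora_linear_forward(self, x, *args, **kwargs):
        # :557 -- frozen base projection (ID_MATCH on base_layer above)
        result = self.base_layer(x, *args, **kwargs)
        for active_adapter in self.active_adapters:
            # :560 -- membership test the lora_A ID_MATCH guards protect
            if active_adapter not in self.lora_A.keys():
                continue
            lora_A = self.lora_A[active_adapter]          # :562
            lora_B = self.lora_B[active_adapter]          # :563
            dropout = self.lora_dropout[active_adapter]   # :564
            scaling = self.scaling[active_adapter]        # :565 -- EQUALS_MATCH pins 1.0
            x = x.to(lora_A.weight.dtype)                 # :566 -- ID_MATCH on lora_A weight
            if not self.use_dora[active_adapter]:         # :568 -- guards pin the non-DoRA branch
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Because EQUALS_MATCH pins scaling['default_0'] == 1.0 and DICT_LENGTH pins one adapter, changing the LoRA scale or adding a second adapter fails these guards and forces a recompile.
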
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_out, 140533214556704) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[19].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_out.lora_A, 140533219615280) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_out.lora_A['default_0'], 140533219621664) # lora_A = self.lora_A[active_adapter] # 
peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_out.lora_A['default_0'].weight, 140526771684272) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_out.lora_B, 140533219621904) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_out.lora_B['default_0'], 140533219621040) # lora_B = 
self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_out.base_layer, 140581770774752) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_out.lora_dropout, 140533219622288) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_out.lora_dropout.training, 140591004393408) # 
dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_out.lora_dropout['default_0'], 140533219613408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[19].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[19].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[19].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[19].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: 
len(L['self'].single_transformer_blocks[19].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[19].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[19].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[19].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].single_transformer_blocks[19].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[19].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[19]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20], accessed_by=GetItemGuardAccessor(20) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20], 140581770774416) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[20].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:45:31.053000 140590996850496 
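
[editor's note] Every LoRA module's _active_adapter above is guarded with TENSOR_ALIASING against the same anchor, L['self'].transformer_blocks[0].norm1.linear._active_adapter, i.e. the compiled graph assumes one shared adapter-selection object across the whole module tree. A quick identity check along those lines; attribute names are taken from the guard sources, check_adapter_aliasing is a hypothetical helper, and the sharing itself is an assumption to verify on your peft version:

    def check_adapter_aliasing(transformer):
        # Mirrors the TENSOR_ALIASING guards: identity ('is'), not equality.
        anchor = transformer.transformer_blocks[0].norm1.linear._active_adapter
        for i, block in enumerate(transformer.single_transformer_blocks):
            for name in ("proj_mlp", "proj_out"):
                layer = getattr(block, name)
                assert layer._active_adapter is anchor, (
                    f"single_transformer_blocks[{i}].{name} rebound _active_adapter"
                )

If anything rebinds that object (for example switching adapters), the aliasing guard fails even when the adapter names compare equal.
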
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn, 140581770775616) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[20].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_k, 140533225365392) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[20].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_k.lora_A, 140533225362848) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_k.lora_A['default_0'], 140533225362368) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_k.lora_A['default_0'].weight, 140526269501248) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_k.lora_B, 140533225366160) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_k.lora_B['default_0'], 140533225358528) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_k.base_layer, 140581770775760) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_k.lora_dropout, 140533225360832) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_k.lora_dropout['default_0'], 140533225357376) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[20].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[20].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[20].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[20].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[20].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[20].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[20].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[20].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[20].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
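The to_k guards above pin down every branch of the PEFT LoRA forward that was traced; the individual source statements are quoted in the # comments (peft/tuners/lora/layer.py:557-568). Reassembled around those quoted lines, the traced path looks roughly like the sketch below; the loop header and the final accumulation line are the standard PEFT formulation and are an assumption here, since only the single statements appear in the log:

    # Sketch of the LoRA Linear forward as specialized by these guards.
    result = self.base_layer(x, *args, **kwargs)      # :557, the frozen base Linear
    for active_adapter in self.active_adapters:       # assumed loop header
        if active_adapter not in self.lora_A.keys():  # :560
            continue
        lora_A = self.lora_A[active_adapter]          # :562, pinned by ID_MATCH
        lora_B = self.lora_B[active_adapter]          # :563, pinned by ID_MATCH
        dropout = self.lora_dropout[active_adapter]   # :564
        scaling = self.scaling[active_adapter]        # :565, guarded == 1.0
        x = x.to(lora_A.weight.dtype)                 # :566
        if not self.use_dora[active_adapter]:         # :568, guarded falsy
            result = result + lora_B(lora_A(dropout(x))) * scaling  # assumed

Note the EQUALS_MATCH on scaling['default_0'] == 1.0: the adapter scale is burned into the guard set, so rerunning the same compiled model with a different LoRA scale fails that leaf and triggers another recompile.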
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_q, 140533225372112) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[20].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_q.lora_A, 140533225368224) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_q.lora_A['default_0'], 140533225371536) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_q.lora_A['default_0'].weight, 140526269509168) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_q.lora_B, 140533225366784) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_q.lora_B['default_0'], 140533225369664) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_q.base_layer, 140581770775856) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_q.lora_dropout, 140533225371296) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_q.lora_dropout['default_0'], 140533225371344) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[20].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[20].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[20].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[20].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[20].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[20].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[20].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[20].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[20].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
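Before the to_v subtree, note what TENSOR_ALIASING asserts for _active_adapter: the attribute read through peft/tuners/tuners_utils.py:516 must be literally the same object on this layer as on L['self'].transformer_blocks[0].norm1.linear, the first LoRA layer the trace touched. In this run every LoRA layer aliases that one object, so each leaf is a plain identity test, approximately:

    # What each TENSOR_ALIASING leaf checks, approximately; `model` is a
    # stand-in name for the compiled transformer instance, not a name from
    # the log.
    anchor = model.transformer_blocks[0].norm1.linear._active_adapter
    assert model.single_transformer_blocks[20].attn.to_q._active_adapter is anchor
    assert model.single_transformer_blocks[20].attn.to_v._active_adapter is anchor

Aliasing guards are cheaper than re-checking the value on every layer, but they also mean that rebinding the shared object on any single layer (rather than mutating it in place) invalidates the whole guard set.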
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_v, 140533225358768) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[20].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_v.lora_A, 140533225358960) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_v.lora_A['default_0'], 140533225362416) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_v.lora_A['default_0'].weight, 140526269505408) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_v.lora_B, 140533225362224) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_v.lora_B['default_0'], 140533225364384) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_v.base_layer, 140581770775904) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_v.lora_dropout, 140533225362608) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_v.lora_dropout['default_0'], 140533225362656) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[20].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[20].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[20].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[20].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[20].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[20].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[20].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[20].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[20].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
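The merged_adapters and _disable_adapters guards recur on every LoRA layer above; together they pin the unmerged, adapters-enabled code path. The two PEFT properties they come from are one-liners, quoted verbatim in the log (peft/tuners/tuners_utils.py:506 and :511); the class scaffolding below is sketched only for context:

    # Sketch of the PEFT properties behind these guards.
    class BaseTunerLayer:
        @property
        def merged(self) -> bool:
            return bool(self.merged_adapters)   # :506, LENGTH_CHECK pins this empty

        @property
        def disable_adapters(self) -> bool:
            return self._disable_adapters       # :511, ID_MATCH pins this to a singleton

Consequently, merging the adapter into the base weights or disabling adapters after compilation flips these values, fails the corresponding leaves, and forces yet another recompile of this frame.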
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.norm_k, 140581770775808) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[20].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[20].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.norm_k.weight, 140581773351712) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
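norm_k's guards end with eps and weight, and norm_q below is checked identically. The qk RMSNorm forward they specialize on is partially quoted in the comments (diffusers/src/diffusers/models/normalization.py:428 and :430); filled in around the quoted statements, it is approximately:

    # Sketch of the RMSNorm forward traced here. The variance line is the
    # standard RMSNorm computation and is an assumption; the other two
    # statements are quoted in the guard comments above.
    variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)
    hidden_states = hidden_states * torch.rsqrt(variance + self.eps)  # :428, eps guarded == 1e-06
    if self.weight is not None:                                       # :430, weight pinned by ID_MATCH
        hidden_states = hidden_states * self.weight

Note the asymmetry: EQUALS_MATCH on eps == 1e-06 is a value guard, robust to rebuilding the module with the same config, while the ID_MATCH on weight requires the very same Parameter object that was traced.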
accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[20].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.norm_q.weight, 140581772774352) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[20].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[20].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.processor, 140581770775568) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm, 140581770775232) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[20].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.norm, 140581770775376) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.silu, 140581770775280) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.linear, 140533224364528) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[20].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.linear.lora_A, 140533224374176) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in 
self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.linear.lora_A['default_0'], 140533224367984) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.linear.lora_A['default_0'].weight, 140526653946864) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.linear.lora_B, 140533224372352) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.linear.lora_B['default_0'], 140533224368608) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.linear.base_layer, 140581770775328) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.linear.lora_dropout, 140533224369808) # dropout = 
self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.linear.lora_dropout['default_0'], 140533224368128) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[20].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[20].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[20].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[20].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[20].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[20].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[20].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].norm.linear._disable_adapters, 140591004393440) # return 
self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[20].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[20].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].act_mlp, 140581770775472) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].act_mlp.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_mlp, 140533224371728) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[20].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_mlp.lora_A, 140533224372688) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[20].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_mlp.lora_A['default_0'], 140533224362320) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_mlp.lora_A['default_0'].weight, 140526653941904) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_mlp.lora_B, 140533224362128) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | 
| +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_mlp.lora_B['default_0'], 140533224363904) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_mlp.base_layer, 140581770775424) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_mlp.lora_dropout, 140533224360736) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_mlp.lora_dropout['default_0'], 140533224362224) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[20].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[20].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[20].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in 
forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[20].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[20].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[20].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[20].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[20].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[20].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_out, 140533224366832) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[20].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_out.lora_A, 140533224363808) # if active_adapter not 
in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_out.lora_A['default_0'], 140533224358240) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_out.lora_A['default_0'].weight, 140526653942704) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_out.lora_B, 140533224358528) # 
lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_out.lora_B['default_0'], 140533224368320) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_out.base_layer, 140581770775520) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_out.base_layer.training, 140591004393440) # result = 
self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_out.lora_dropout, 140533224360352) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_out.lora_dropout['default_0'], 140533224360400) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[20].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: 
len(L['self'].single_transformer_blocks[20].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[20].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[20].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[20].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[20].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[20].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[20].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[20].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[20].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[20].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[20]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
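
The run of guards above completes the peft LoRA template for single_transformer_blocks[20].proj_out and closes out block 20. For each LoRA-wrapped Linear, Dynamo pins every piece of adapter state it read while tracing: ID_MATCH (an id() check) on the lora_A/lora_B ModuleDicts and their 'default_0' entries, EQUALS_MATCH on scaling['default_0'] == 1.0, a LENGTH_CHECK that merged_adapters is empty, ID_MATCH on the use_dora and _disable_adapters flags, and TENSOR_ALIASING guards asserting that _active_adapter is the very same object shared with transformer_blocks[0].norm1.linear. The sketch below reconstructs the guarded branch from the source lines quoted in the guard comments (peft/tuners/lora/layer.py:557-568); it is assembled from those quotes for illustration, not copied from peft itself.

    # Sketch of the LoRA forward path these guards protect, pieced together
    # from the source lines quoted above (peft/tuners/lora/layer.py:557-568).
    # Illustrative only; the final update line is the standard LoRA formula
    # and is not itself quoted in the log.
    def lora_forward(layer, x, *args, **kwargs):
        result = layer.base_layer(x, *args, **kwargs)      # layer.py:557
        for active_adapter in layer.active_adapters:
            if active_adapter not in layer.lora_A.keys():  # layer.py:560 -> ID_MATCH on lora_A
                continue
            lora_A = layer.lora_A[active_adapter]          # layer.py:562 -> ID_MATCH on ['default_0']
            lora_B = layer.lora_B[active_adapter]          # layer.py:563
            dropout = layer.lora_dropout[active_adapter]   # layer.py:564
            scaling = layer.scaling[active_adapter]        # layer.py:565 -> EQUALS_MATCH == 1.0
            x = x.to(lora_A.weight.dtype)                  # layer.py:566 -> ID_MATCH on .weight
            if not layer.use_dora[active_adapter]:         # layer.py:568 -> ID_MATCH on False
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Because everything here is matched by object identity rather than by value, swapping any of these modules out (for example, loading a different LoRA checkpoint into the same pipeline) invalidates the guards and forces a full recompile.
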
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21], accessed_by=GetItemGuardAccessor(21) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21], 140581770775184) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[21].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn, 140581770776384) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[21].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_k, 140533225863056) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
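
The ID_MATCH that closes the line above anchors attn.to_k of block 21 to the Flux attention processor (key = attn.to_k(hidden_states), attention_processor.py:1717); the to_q and to_v projections guarded below come from the adjacent lines :1716 and :1718, each wrapped in the same per-layer template. For reference, a dump in this format can be reproduced roughly as follows; this assumes the PyTorch 2.x logging API, and `transformer` / `example_inputs` are placeholders for the LoRA-loaded Flux model and its inputs:

    # Equivalent to running with TORCH_LOGS="guards" in the environment.
    import torch

    torch._logging.set_logs(guards=True)

    compiled = torch.compile(transformer)
    _ = compiled(**example_inputs)   # the guard tree prints after each compilation
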
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[21].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_k.lora_A, 140533225857392) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_k.lora_A['default_0'], 140533225864928) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[21].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_k.lora_A['default_0'].weight, 140526269508848) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_k.lora_B, 140533225865120) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_k.lora_B['default_0'], 140533225859552) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_k.base_layer, 140581770776528) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_k.lora_dropout, 140533225864304) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_k.lora_dropout.training, 140591004393408) # 
dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_k.lora_dropout['default_0'], 140533225864256) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[21].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[21].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[21].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[21].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] 
[0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[21].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[21].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[21].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[21].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[21].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
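
The paired TENSOR_ALIASING guards above close the to_k entry. Despite the tensor-oriented name, what is asserted here is plain object identity: peft hands every wrapped layer the same _active_adapter object, so rather than re-validating its contents for each of the hundreds of LoRA submodules, Dynamo checks once that each layer still holds the exact object it saw on transformer_blocks[0].norm1.linear. In effect (a hand-written approximation; real guards are generated inside torch._dynamo, and `transformer` is a placeholder for the model):

    # What each TENSOR_ALIASING guard reduces to: an `is` check against the
    # first LoRA layer encountered during tracing.
    ref = transformer.transformer_blocks[0].norm1.linear._active_adapter

    def active_adapter_guard_holds(module) -> bool:
        return module._active_adapter is ref   # identity, not equality

    assert active_adapter_guard_holds(
        transformer.single_transformer_blocks[21].attn.to_k
    )
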
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_q, 140533226589488) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[21].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_q.lora_A, 140533226590784) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_q.lora_A['default_0'], 140533225859744) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_q.lora_A['default_0'].weight, 140526269510208) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_q.lora_B, 140533226586560) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_q.lora_B['default_0'], 140533225864160) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_q.base_layer, 140581770776624) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[21].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_q.lora_dropout, 140533226586896) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_q.lora_dropout['default_0'], 140533226587232) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[21].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[21].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # 
peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[21].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[21].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[21].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[21].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[21].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q._disable_adapters, 
accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[21].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[21].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
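
With to_q closed out above, the pattern has now repeated verbatim for proj_out, to_k and to_q (and continues for to_v below): every LoRA-wrapped Linear contributes roughly the same dozen guards, so the total guard count scales with the number of blocks times the number of wrapped projections. The EQUALS_MATCH on scaling['default_0'] == 1.0 is the fragile one: peft computes scaling as lora_alpha / r, and the scale_lora_layers(self, lora_scale) call guarded at the top of this trace multiplies it at runtime, so requesting any lora_scale other than 1.0 changes the guarded float and triggers a recompile. One common way to avoid the adapter branch entirely before compiling is to fuse the LoRA into the base weights; a sketch against the diffusers pipeline helpers, with `pipe` as a placeholder:

    import torch

    # Fold scaling * (lora_B @ lora_A) into the base Linear weights, then drop
    # the adapter modules so forward() never enters the guarded peft branch.
    pipe.fuse_lora(lora_scale=1.0)
    pipe.unload_lora_weights()
    pipe.transformer = torch.compile(pipe.transformer)
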
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_v, 140533225857680) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[21].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_v.lora_A, 140533225857200) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_v.lora_A['default_0'], 140533225862048) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.lora_A['default_0'].weight,
accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_v.lora_A['default_0'].weight, 140537659713280) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_v.lora_B, 140533225862720) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_v.lora_B['default_0'], 140533225853408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_v.base_layer, 140581770776672) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_v.lora_dropout, 140533225863104) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_v.lora_dropout['default_0'], 140533225860608) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.lora_dropout['default_0'].training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[21].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[21].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[21].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[21].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[21].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[21].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[21].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[21].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[21].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[21].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.norm_k, 140581770776576) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in 
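The subtree above is the complete guard set for a single LoRA-wrapped projection (attn.to_v): one guard for every Python-level attribute the PEFT forward reads. A minimal stand-in module, paraphrased from the source lines quoted in the guard comments (peft/tuners/lora/layer.py:557-568) rather than taken from PEFT itself, reproduces the same guard shapes when compiled:

    import torch
    import torch.nn as nn

    # Hypothetical stand-in for the guarded PEFT LoRA Linear; attribute names
    # follow the source lines quoted in the guard comments, the rest is assumed.
    class TinyLoraLinear(nn.Module):
        def __init__(self, base: nn.Linear, r: int = 4):
            super().__init__()
            self.base_layer = base                                # -> ID_MATCH guard
            self.lora_A = nn.ModuleDict({"default_0": nn.Linear(base.in_features, r, bias=False)})
            self.lora_B = nn.ModuleDict({"default_0": nn.Linear(r, base.out_features, bias=False)})
            self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})
            self.scaling = {"default_0": 1.0}                     # dict -> TYPE_MATCH + DICT_LENGTH + EQUALS_MATCH
            self.use_dora = {"default_0": False}                  # bool -> ID_MATCH against the False singleton
            self.active_adapters = ["default_0"]                  # plain list here; a property in PEFT

        def forward(self, x):
            result = self.base_layer(x)                           # layer.py:557
            for name in self.active_adapters:
                if name not in self.lora_A.keys():                # layer.py:560
                    continue
                lora_A = self.lora_A[name]                        # layer.py:562
                lora_B = self.lora_B[name]                        # layer.py:563
                dropout = self.lora_dropout[name]                 # layer.py:564
                scaling = self.scaling[name]                      # layer.py:565
                x = x.to(lora_A.weight.dtype)                     # layer.py:566
                if not self.use_dora[name]:                       # layer.py:568
                    result = result + lora_B(lora_A(dropout(x))) * scaling
            return result

    layer = torch.compile(TinyLoraLinear(nn.Linear(16, 16)))
    layer(torch.randn(2, 16))  # first call compiles and installs guards shaped like those above

Every dict lookup, float, and bool read on that path becomes one of the ID_MATCH / TYPE_MATCH / DICT_LENGTH / EQUALS_MATCH entries in the dump, which is why a single adapter layer contributes on the order of thirty guards.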
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.norm_k, 140581770776576) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[21].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[21].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.norm_k.weight, 140581772774032) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.norm_q, 140581770776432) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[21].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[21].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.norm_q.weight, 140581772782992) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[21].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[21].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.processor, 140581770776336) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.forward, accessed_by=FuncDefaultsGuardAccessor
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
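Worth noting in the attn subtree: plain Python numbers read while tracing (norm_k.eps, norm_q.eps, attn.heads) are specialized to their exact values via EQUALS_MATCH rather than handled symbolically, and every plain nn.Module additionally gets a DICT_CONTAINS check that its instance __dict__ does not shadow forward. A minimal repro of the eps behaviour (hypothetical module, not taken from this log):

    import torch

    class RMSNormLike(torch.nn.Module):
        def __init__(self, eps: float = 1e-6):
            super().__init__()
            self.eps = eps  # plain float attribute, like norm_q.eps / norm_k.eps above

        def forward(self, x):
            variance = x.pow(2).mean(-1, keepdim=True)
            # Reading self.eps inside the compiled region burns its value into the
            # graph and installs an EQUALS_MATCH guard (self.eps == 1e-06).
            return x * torch.rsqrt(variance + self.eps)

    inner = RMSNormLike()
    m = torch.compile(inner)
    m(torch.randn(2, 8))   # compiles against eps == 1e-06
    inner.eps = 1e-5       # EQUALS_MATCH guard now fails
    m(torch.randn(2, 8))   # triggers a recompile of the frame

The same mechanism explains the attn.heads == 24 guard: changing such a scalar after compilation invalidates the cached graph rather than feeding it a new value.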
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm, 140581770776000) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[21].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.norm, 140581770776144) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.silu, 140581770776048) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.linear, 140533225364480) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[21].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.linear.lora_A, 140533225365920) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.linear.lora_A['default_0'], 140533226600144) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.linear.lora_A['default_0'].weight, 140526269503328) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.linear.lora_B, 140533226595968) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.linear.lora_B['default_0'], 140533226596832) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.linear.base_layer, 140581770776096) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.linear.lora_dropout, 140533225362128) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.linear.lora_dropout['default_0'], 140533225362560) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[21].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[21].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[21].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[21].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[21].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[21].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[21].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[21].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
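The norm subtree maps onto the AdaLayerNormZero-style modulation of the single transformer block (normalization.py:169-171 and transformer_flux.py:88 as quoted in the guard comments). A toy version, with the dimensions and the 3-way chunk assumed rather than read from the log:

    import torch
    import torch.nn as nn

    # Illustrative module shaped like the guarded norm subtree above; not the
    # diffusers implementation itself.
    class AdaNormSingleLike(nn.Module):
        def __init__(self, dim: int):
            super().__init__()
            self.silu = nn.SiLU()                    # guards on .silu and .silu.training
            self.linear = nn.Linear(dim, 3 * dim)   # in the log this Linear is itself LoRA-wrapped
            self.norm = nn.LayerNorm(dim, elementwise_affine=False, eps=1e-6)

        def forward(self, x, emb):
            emb = self.linear(self.silu(emb))                                 # normalization.py:169
            shift_msa, scale_msa, gate = emb.chunk(3, dim=1)                  # assumed split
            x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # normalization.py:171
            return x, gate

    blk = torch.compile(AdaNormSingleLike(64))
    blk(torch.randn(2, 10, 64), torch.randn(2, 64))

The dump reflects exactly this structure: ID_MATCH on .norm, .silu, and .linear, plus a full LoRA guard subtree under norm.linear because the modulation Linear carries the adapter too.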
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].act_mlp, 140581770776240) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_mlp, 140533226599328) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[21].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_mlp.lora_A, 140533226601920) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_mlp.lora_A['default_0'], 140533226591792) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_mlp.lora_A['default_0'].weight, 140526269506128) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_mlp.lora_B, 140533226592032) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_mlp.lora_B['default_0'], 140533226594240) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_mlp.base_layer, 140581770776192) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_mlp.lora_dropout, 140533226594192) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_mlp.lora_dropout['default_0'], 140533226592992) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[21].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[21].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[21].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[21].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[21].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[21].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[21].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[21].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_out, 140533226595104) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[21].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
(self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_out.lora_A, 140533226593664) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_out.lora_A['default_0'], 140533226589152) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[21].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_out.lora_A['default_0'].weight, 140526269510528) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_out.lora_B, 140533226589056) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_out.lora_B['default_0'], 140533226587376) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_out.base_layer, 140581770776288) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_out.lora_dropout, 140533226588576) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_out.lora_dropout['default_0'], 140533226587520) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[21].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[21].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[21].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[21].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[21].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[21].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[21].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[21].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[21].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[21].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[21]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22], accessed_by=GetItemGuardAccessor(22) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22], 140581770775952) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[22].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn, 140581770777152) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[22].attn.__dict__) # forward_call = 
(self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_k, 140533227553024) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[22].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_k.lora_A, 140533227553504) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[22].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_k.lora_A['default_0'], 140533227569056) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_k.lora_A['default_0'].weight, 140526555707760) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_k.lora_B, 140533227552832) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_k.lora_B['default_0'], 140533227562672) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_k.base_layer, 140581770777296) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_k.base_layer.training, 140591004393440) # result = 
self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_k.lora_dropout, 140533227567136) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_k.lora_dropout['default_0'], 140533227555376) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[22].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[22].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[22].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[22].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[22].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[22].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[22].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged 
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[22].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[22].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_q, 140533227556816) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[22].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_q.lora_A, 140533227559168) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_q.lora_A['default_0'], 140533227560704) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_q.lora_A['default_0'].weight, 140526555693360) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_q.lora_B, 140533227560464) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_q.lora_B['default_0'], 140533227556624) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.base_layer, 
accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_q.base_layer, 140581770777392) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_q.lora_dropout, 140533227560944) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_q.lora_dropout['default_0'], 140533227557488) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | 
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[22].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[22].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[22].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[22].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[22].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q._forward_hooks, 
accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[22].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[22].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[22].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[22].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_v, 140533230197040) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[22].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_v.lora_A, 140533230206832) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_v.lora_A['default_0'], 140533230200928) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_v.lora_A['default_0'].weight, 140526655307536) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_v.lora_B, 140533230202800) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_v.lora_B['default_0'], 140533230201456) # 
lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_v.base_layer, 140581770777440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_v.lora_dropout, 140533230199440) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_v.lora_dropout['default_0'], 140533230200976) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[22].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[22].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[22].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[22].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # 
peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[22].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[22].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[22].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.to_v._active_adapter, 
accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[22].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[22].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.norm_k, 140581770777344) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[22].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[22].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.norm_k.weight, 140581772783952) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.norm_q, 140581770777200) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[22].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[22].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # 
diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.norm_q.weight, 140581773260208) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[22].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[22].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.processor, 140581770777104) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn._forward_hooks, 
accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm, 140581770776768) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[22].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # 
diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.norm, 140581770776912) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.silu, 140581770776816) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.linear, 140533225850096) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in 
forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[22].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.linear.lora_A, 140533225850816) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.linear.lora_A['default_0'], 140533225853024) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[22].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.linear.lora_A['default_0'].weight, 140526553938208) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.linear.lora_B, 140533225852976) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.linear.lora_B['default_0'], 140533225854272) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.linear.base_layer, 140581770776864) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.linear.lora_dropout, 140533225851728) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[22].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.linear.lora_dropout['default_0'], 140533225853936) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[22].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[22].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[22].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[22].norm.linear.use_dora, 140591004466944) # if not 
self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[22].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[22].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[22].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[22].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[22].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[22].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].act_mlp, 140581770777008) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[22].proj_mlp, 140533225864112) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[22].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_mlp.lora_A, 140533225849424) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_mlp.lora_A['default_0'], 140533227560416) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_mlp.lora_A['default_0'].weight, 140526553927248) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_mlp.lora_B, 140533225855616) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_mlp.lora_B['default_0'], 140533227565600) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_mlp.base_layer, 140581770776960) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_mlp.lora_dropout, 140533225860944) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 
in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_mlp.lora_dropout['default_0'], 140533225860704) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[22].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[22].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[22].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[22].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[22].proj_mlp.use_dora) == 1 # if not 
self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[22].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[22].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[22].proj_mlp._active_adapter # return 
self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[22].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_out, 140533227565168) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[22].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_out.lora_A, 140533227563104) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[22].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_out.lora_A['default_0'], 140533227566656) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_out.lora_A['default_0'].weight, 140526555707120) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_out.lora_B, 140533227566704) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | 
| +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_out.lora_B['default_0'], 140533227566464) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_out.base_layer, 140581770777056) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_out.lora_dropout, 140533227567328) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_out.lora_dropout['default_0'], 140533227566896) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[22].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[22].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[22].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in 
forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[22].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[22].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[22].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[22].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[22].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[22].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[22].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[22]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23], accessed_by=GetItemGuardAccessor(23) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23], 140581770776720) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[23].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[23].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn, 140581770777920) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[23].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_k, 140533231224432) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[23].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_k.lora_A, 140533231224624) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_k.lora_A['default_0'], 140533231228080) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_k.lora_A['default_0'].weight, 140526259722480) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_k.lora_B, 140533231227888) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_k.lora_B['default_0'], 140533231230048) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = 
self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_k.base_layer, 140581770778064) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_k.lora_dropout, 140533231228272) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_k.lora_dropout['default_0'], 140533231228320) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[23].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[23].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[23].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[23].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[23].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[23].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[23].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[23].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[23].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
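The to_k block above pins every attribute that PEFT's LoRA Linear forward touches, and the to_q and to_v blocks below repeat the exact same pattern. As a hedged sketch (not PEFT's real class; the adapter key "default_0" and the quoted layer.py:557-568 line numbers come from the log, everything else is illustrative), the guarded forward looks like this:

```python
# Sketch of the LoRA Linear forward traced through peft/tuners/lora/layer.py.
# Every attribute read below -- base_layer, lora_A/lora_B, lora_dropout,
# scaling, use_dora, merged_adapters, _active_adapter -- appears above as an
# ID_MATCH / TYPE_MATCH / DICT_LENGTH / EQUALS_MATCH / LENGTH_CHECK guard.
import torch
import torch.nn as nn

class LoraLinearSketch(nn.Module):
    def __init__(self, base_layer: nn.Linear, r: int = 16, lora_alpha: int = 16):
        super().__init__()
        self.base_layer = base_layer
        self.lora_A = nn.ModuleDict({"default_0": nn.Linear(base_layer.in_features, r, bias=False)})
        self.lora_B = nn.ModuleDict({"default_0": nn.Linear(r, base_layer.out_features, bias=False)})
        self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})
        # Plain dicts: hence the TYPE_MATCH + DICT_LENGTH + per-key guards above.
        self.scaling = {"default_0": lora_alpha / r}   # lora_alpha == r gives the guarded 1.0
        self.use_dora = {"default_0": False}
        self.merged_adapters = []                      # empty -> the LENGTH_CHECK passes
        self._active_adapter = ["default_0"]
        self._disable_adapters = False

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        result = self.base_layer(x)                        # layer.py:557
        for active_adapter in self._active_adapter:
            if active_adapter not in self.lora_A.keys():   # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]           # layer.py:562
            lora_B = self.lora_B[active_adapter]           # layer.py:563
            dropout = self.lora_dropout[active_adapter]    # layer.py:564
            scaling = self.scaling[active_adapter]         # layer.py:565
            x = x.to(lora_A.weight.dtype)                  # layer.py:566
            if not self.use_dora[active_adapter]:          # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

layer = LoraLinearSketch(nn.Linear(3072, 3072))
out = layer(torch.randn(1, 3072))
```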
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_q, 140533231228032) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[23].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_q.lora_A, 140533231224864) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_q.lora_A['default_0'], 140533231228704) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_q.lora_A['default_0'].weight, 140526259727920) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_q.lora_B, 140533231229952) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_q.lora_B['default_0'], 140533231222896) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_q.base_layer, 140581770778160) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_q.lora_dropout, 140533231228992) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_q.lora_dropout['default_0'], 140533231223472) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[23].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[23].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[23].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[23].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[23].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[23].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[23].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[23].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[23].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_v, 140533231230240) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[23].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_v.lora_A, 140533231231680) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_v.lora_A['default_0'], 140533232481280) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_v.lora_A['default_0'].weight, 140526259729200) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_v.lora_B, 140533232482192) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_v.lora_B['default_0'], 140533232483824) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_v.base_layer, 140581770778208) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_v.lora_dropout, 140533231227792) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_v.lora_dropout['default_0'], 140533231228224) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[23].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[23].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[23].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[23].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[23].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[23].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[23].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[23].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[23].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.norm_k, 140581770778112) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[23].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[23].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.norm_k.weight, 140581765132864) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.norm_q, 140581770777968) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[23].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[23].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.norm_q.weight, 140581772776352) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
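The eps == 1e-06 EQUALS_MATCH guards above come from the RMSNorm that norm_q and norm_k point at. A minimal sketch of that forward, following the normalization.py:428/430 frames quoted in the log (an approximation for orientation, not diffusers' exact class):

```python
# Sketch of the guarded RMSNorm forward: the EQUALS_MATCH pins the eps scalar
# used inside torch.rsqrt, and the ID_MATCH on .weight pins the parameter
# checked by the `if self.weight is not None` branch.
import torch
import torch.nn as nn

class RMSNormSketch(nn.Module):
    def __init__(self, dim: int, eps: float = 1e-6, elementwise_affine: bool = True):
        super().__init__()
        self.eps = eps
        self.weight = nn.Parameter(torch.ones(dim)) if elementwise_affine else None

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        variance = hidden_states.float().pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + self.eps)  # normalization.py:428
        if self.weight is not None:                                       # normalization.py:430
            hidden_states = hidden_states.to(self.weight.dtype) * self.weight
        return hidden_states
```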
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[23].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[23].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.processor, 140581770777872) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.forward, accessed_by=FuncDefaultsGuardAccessor
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
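Scalar guards like attn.heads == 24 here (and scaling['default_0'] == 1.0 earlier) mean the compiled graph is reused only while those exact values hold; if any of them changes, for instance by setting a different LoRA scale, the guard fails and Dynamo recompiles, which is how guard sets like this [0/3] one accumulate. A minimal way to surface such dumps yourself, assuming PyTorch 2.x's torch._logging API (equivalently, set TORCH_LOGS="guards,recompiles" in the environment):

```python
# Print the TREE_GUARD_MANAGER dump and any recompile reasons for a toy module.
import torch

torch._logging.set_logs(guards=True, recompiles=True)

model = torch.compile(torch.nn.Linear(8, 8))
model(torch.randn(2, 8))  # first call compiles; the guard tree is printed
```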
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm, 140581770777536) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[23].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.norm, 140581770777680) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.silu, 140581770777584) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.linear, 140533230204576) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[23].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.linear.lora_A, 140533230205536) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.linear.lora_A['default_0'], 140533230195168) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.linear.lora_A['default_0'].weight, 140526655306976) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.linear.lora_B, 140533230194976) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.linear.lora_B['default_0'], 140533230196752) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.linear.base_layer, 140581770777632) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH:
___check_obj_id(L['self'].single_transformer_blocks[23].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.linear.lora_dropout, 140533230193584) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.linear.lora_dropout['default_0'], 140533230195072) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[23].norm.linear.scaling, 
140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[23].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[23].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[23].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[23].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[23].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- 
LENGTH_CHECK: not L['self'].single_transformer_blocks[23].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[23].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[23].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: 
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_mlp, 140533230199776) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[23].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_mlp.lora_A, 140533230196656) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_mlp.lora_A['default_0'], 140533230193200) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_mlp.lora_A['default_0'].weight, 140526655294256) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_mlp.lora_B, 140533230190992) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_mlp.lora_B['default_0'], 140533230201168) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_mlp.base_layer, 140581770777728) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_mlp.lora_dropout, 140533230190752) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_mlp.lora_dropout['default_0'], 140533230193344) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[23].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[23].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[23].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[23].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[23].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[23].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[23].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[23].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_out, 140533231237776) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[23].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_out.lora_A, 140533231225968) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_out.lora_A['default_0'], 140533231237200) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_out.lora_A['default_0'].weight, 140526259720720) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_out.lora_B, 140533231233072) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_out.lora_B['default_0'], 140533231234512) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_out.base_layer, 140581770777824) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_out.lora_dropout, 140533231236960) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_out.lora_dropout['default_0'], 140533231237008) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[23].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[23].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[23].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[23].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[23].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[23].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[23].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[23].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[23].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[23]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
source=L['self'].single_transformer_blocks[23]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24], accessed_by=GetItemGuardAccessor(24) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24], 140581770777488) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[24].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn, 140581770778688) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[24].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.training, 140591004393440) # attn_output = self.attn( # 
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_k, 140533233882480) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[24].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_k.lora_A, 140533233887808) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_k.lora_A['default_0'], 140533233883680) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_k.lora_A['default_0'].weight, 140537661626688) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_k.lora_B, 140533233885360) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_k.lora_B['default_0'], 140533233884400) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_k.base_layer, 140581770778832) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_k.lora_dropout, 140533233880224) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_k.lora_dropout['default_0'], 140533233884064) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[24].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[24].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[24].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[24].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[24].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[24].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[24].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[24].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
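The to_k subtree above guards every attribute the PEFT LoRA linear reads on its hot path; the source comments cite peft/tuners/lora/layer.py:557-568. Below is a runnable paraphrase of that path, reconstructed only from the cited lines; the class name, rank, Identity dropout, and constructor are illustrative assumptions, not the verbatim PEFT class.

    import torch
    import torch.nn as nn

    class LoraLinearSketch(nn.Module):
        # Hypothetical stand-in for peft.tuners.lora.layer.Linear, reconstructed
        # from the source lines the guards cite.
        def __init__(self, base: nn.Linear, r: int = 8, scaling: float = 1.0):
            super().__init__()
            self.base_layer = base
            self.lora_A = nn.ModuleDict({"default_0": nn.Linear(base.in_features, r, bias=False)})
            self.lora_B = nn.ModuleDict({"default_0": nn.Linear(r, base.out_features, bias=False)})
            self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})
            self.scaling = {"default_0": scaling}   # plain dict -> TYPE_MATCH/DICT_LENGTH guards
            self.use_dora = {"default_0": False}    # plain dict -> same guard shape
            self.active_adapters = ["default_0"]

        def forward(self, x):
            result = self.base_layer(x)                            # layer.py:557
            for active_adapter in self.active_adapters:
                if active_adapter not in self.lora_A.keys():       # layer.py:560
                    continue
                lora_A = self.lora_A[active_adapter]               # layer.py:562
                lora_B = self.lora_B[active_adapter]               # layer.py:563
                dropout = self.lora_dropout[active_adapter]        # layer.py:564
                scaling = self.scaling[active_adapter]             # layer.py:565
                x = x.to(lora_A.weight.dtype)                      # layer.py:566
                if not self.use_dora[active_adapter]:              # layer.py:568
                    result = result + lora_B(lora_A(dropout(x))) * scaling
            return result

    layer = LoraLinearSketch(nn.Linear(64, 64))
    print(layer(torch.randn(2, 64)).shape)  # torch.Size([2, 64])

Each attribute read on that path (base_layer, lora_A, lora_B, lora_dropout, the plain-dict scaling and use_dora, plus the merged/disable_adapters properties from peft/tuners/tuners_utils.py) surfaces as one guard per wrapped module, which is why the same subtree repeats verbatim for to_q and to_v below and again for every block in the model.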
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_q, 140533233891840) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[24].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_q.lora_A, 140533233889680) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_q.lora_A['default_0'], 140533233893088) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_q.lora_A['default_0'].weight, 140537661630368) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_q.lora_B, 140533233886416) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_q.lora_B['default_0'], 140533233884736) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_q.base_layer, 140581770778928) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_q.lora_dropout, 140533233892464) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_q.lora_dropout['default_0'], 140533233887952) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[24].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[24].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[24].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[24].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[24].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[24].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[24].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[24].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
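The TENSOR_ALIASING entries that close the to_k and to_q subtrees assert object identity rather than value equality: PEFT installs one shared _active_adapter object across all wrapped layers, and Dynamo guards that the sharing still holds. A minimal illustration of the check; the Layer class and the list value are hypothetical, but the guard expression above is literally an `is` comparison:

    shared_active_adapter = ["default_0"]   # one object shared by every LoRA layer

    class Layer:                            # hypothetical stand-in for a wrapped module
        pass

    a, b = Layer(), Layer()
    a._active_adapter = shared_active_adapter
    b._active_adapter = shared_active_adapter
    assert a._active_adapter is b._active_adapter   # what TENSOR_ALIASING verifies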
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_v, 140533233877968) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[24].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_v.lora_A, 140533233877536) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_v.lora_A['default_0'], 140533233879264) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_v.lora_A['default_0'].weight, 140537661633408) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_v.lora_B, 140533233880752) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_v.lora_B['default_0'], 140533233882144) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_v.base_layer, 140581770778976) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_v.lora_dropout, 140533233881664) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_v.lora_dropout['default_0'], 140533233881808) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[24].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[24].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[24].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[24].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[24].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[24].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[24].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[24].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
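With to_v done, all three projection subtrees for this attention module are in place; the cited lines attention_processor.py:1716-1718 are the three projection calls in the Flux attention processor. A minimal sketch of that call shape, where the SimpleNamespace stand-in and dimensions are illustrative, not the diffusers Attention class:

    from types import SimpleNamespace
    import torch
    import torch.nn as nn

    attn = SimpleNamespace(to_q=nn.Linear(64, 64), to_k=nn.Linear(64, 64), to_v=nn.Linear(64, 64))
    hidden_states = torch.randn(2, 16, 64)
    query = attn.to_q(hidden_states)  # attention_processor.py:1716 -> to_q.* guard subtree
    key = attn.to_k(hidden_states)    # attention_processor.py:1717 -> to_k.* guard subtree
    value = attn.to_v(hidden_states)  # attention_processor.py:1718 -> to_v.* guard subtree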
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.norm_k, 140581770778880) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[24].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[24].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.norm_k.weight, 140581771019104) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.norm_q, 140581770778736) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[24].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[24].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.norm_q.weight, 140581773261248) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[24].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[24].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.processor, 140581770778640) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm, 140581770778304) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[24].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | 
| | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.norm, 140581770778448) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.silu, 140581770778352) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.linear, 140533232482672) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[24].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.linear.lora_A, 140533232483632) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.linear.lora_A['default_0'], 140533232473984) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = 
self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.linear.lora_A['default_0'].weight, 140526259716480) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.linear.lora_B, 140533232478112) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.linear.lora_B['default_0'], 140533232474416) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[24].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.linear.base_layer, 140581770778400) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.linear.lora_dropout, 140533232476528) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.linear.lora_dropout['default_0'], 140533232476240) # dropout = 
self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[24].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[24].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[24].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[24].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[24].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | 
| | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[24].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[24].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[24].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].single_transformer_blocks[24].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].act_mlp, 140581770778544) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_mlp, 140533232478592) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[24].proj_mlp.__dict__) # 
forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_mlp.lora_A, 140533232477104) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_mlp.lora_A['default_0'], 140533232470960) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[24].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_mlp.lora_A['default_0'].weight, 140526259718960) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_mlp.lora_B, 140533232468080) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_mlp.lora_B['default_0'], 140533232470816) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_mlp.base_layer, 140581770778496) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_mlp.lora_dropout, 140533232471008) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_mlp.lora_dropout['default_0'], 140533232470528) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[24].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[24].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[24].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[24].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[24].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[24].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[24].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[24].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[24].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_out, 140533232470336) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[24].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_out.lora_A, 140533232468176) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_out.lora_A['default_0'], 140533233883200) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_out.lora_A['default_0'].weight, 140537661620528) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_out.lora_B, 140533232471440) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_out.lora_B['default_0'], 140533233887472) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_out.base_layer, 140581770778592) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_out.lora_dropout, 140533232473120) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_out.lora_dropout['default_0'], 140533232472304) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[24].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[24].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[24].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[24].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[24].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[24].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[24].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[24].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[24].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[24].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[24]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
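The guard subtree for single_transformer_blocks[24] closes here, and the same pattern repeats for single_transformer_blocks[25] below. Each PEFT LoRA layer that the traced forward touches contributes the same bundle: ID_MATCH on the module object and its training flag, ID_MATCH on the lora_A/lora_B/lora_dropout ModuleDicts and their 'default_0' entries, TYPE_MATCH/DICT_LENGTH/EQUALS_MATCH on the scaling dict, LENGTH_CHECK on merged_adapters, and TENSOR_ALIASING tying every _active_adapter back to the one on transformer_blocks[0].norm1.linear. A minimal sketch that reproduces this class of guard tree follows; it assumes a recent PyTorch (with torch._logging) and peft, and the module name Block/proj_out and the default adapter name are illustrative, not taken from this trace.

import torch
import torch.nn as nn
from peft import LoraConfig, inject_adapter_in_model

class Block(nn.Module):
    def __init__(self):
        super().__init__()
        self.proj_out = nn.Linear(64, 64)

    def forward(self, x):
        return self.proj_out(x)

model = Block()
# inject_adapter_in_model swaps proj_out for a peft lora.Linear exposing
# base_layer, lora_A, lora_B, lora_dropout, scaling and use_dora, i.e. the
# attributes guarded in the records above.
model = inject_adapter_in_model(LoraConfig(r=8, target_modules=["proj_out"]), model)

torch._logging.set_logs(guards=True)  # equivalent to TORCH_LOGS="guards"
compiled = torch.compile(model)
compiled(torch.randn(2, 64))  # first call compiles and dumps TREE_GUARD_MANAGER

With the guards artifact enabled, the first call prints a guard tree of the same shape as the records here, one subtree per wrapped module.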
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25], accessed_by=GetItemGuardAccessor(25)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25], 140581770778256) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[25].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn, 140581770779456) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[25].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_k, 140533234977856) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[25].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_k.lora_A, 140533234983040) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_k.lora_A['default_0'], 140533235398640) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_k.lora_A['default_0'].weight, 140526672107200) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_k.lora_B, 140533234975744) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_k.lora_B['default_0'], 140533235397536) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_k.base_layer, 140581770779600) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_k.lora_dropout, 140533234983616) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_k.lora_dropout['default_0'], 140533234976464) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[25].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[25].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[25].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[25].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[25].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[25].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[25].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[25].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[25].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_q, 140533234984144) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[25].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_q.lora_A, 140533234985920) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_q.lora_A['default_0'], 140533234975456) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_q.lora_A['default_0'].weight, 140526672105280) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_q.lora_B, 140533234974880) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_q.lora_B['default_0'], 140533234977904) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_q.base_layer, 140581770779696) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_q.lora_dropout, 140533234976752) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_q.lora_dropout['default_0'], 140533234976512) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[25].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[25].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[25].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[25].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[25].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[25].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[25].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[25].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[25].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_v, 140533235397488) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[25].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_v.lora_A, 140533235393552) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_v.lora_A['default_0'], 140533235390336) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_v.lora_A['default_0'].weight, 140526694849872) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_v.lora_B, 140533235393984) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_v.lora_B['default_0'], 140533235386928) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_v.base_layer, 140581770779744) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | |
+- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_v.lora_dropout, 140533235393888) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_v.lora_dropout['default_0'], 140533235385920) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[25].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[25].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.scaling['default_0'], 
accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[25].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[25].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[25].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[25].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[25].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: 
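
The guard block above walks PEFT's LoRA Linear forward line by line; the trailing comments cite peft/tuners/lora/layer.py:557-568. Below is a minimal, self-contained sketch of that control flow, not PEFT's actual class: the attribute names (base_layer, lora_A, lora_B, lora_dropout, scaling, use_dora, merged_adapters, _active_adapter) are taken from the log, while dimensions, rank, and the adapter handling are illustrative placeholders.

import torch
import torch.nn as nn

class LoraLinearSketch(nn.Module):
    # Each attribute below is what the guards pin down: submodules get
    # ID_MATCH, plain dicts get TYPE_MATCH/DICT_LENGTH, scalar entries get
    # EQUALS_MATCH or ID_MATCH on the bool singleton, and the empty
    # merged_adapters list gets the LENGTH_CHECK.
    def __init__(self, base: nn.Linear, r: int = 16, lora_alpha: int = 16):
        super().__init__()
        self.base_layer = base
        self.lora_A = nn.ModuleDict({"default_0": nn.Linear(base.in_features, r, bias=False)})
        self.lora_B = nn.ModuleDict({"default_0": nn.Linear(r, base.out_features, bias=False)})
        self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})
        self.scaling = {"default_0": lora_alpha / r}
        self.use_dora = {"default_0": False}
        self.merged_adapters = []
        self._disable_adapters = False
        self._active_adapter = ["default_0"]

    def forward(self, x):
        result = self.base_layer(x)                       # layer.py:557
        for active_adapter in self._active_adapter:
            if active_adapter not in self.lora_A.keys():  # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]          # layer.py:562
            lora_B = self.lora_B[active_adapter]          # layer.py:563
            dropout = self.lora_dropout[active_adapter]   # layer.py:564
            scaling = self.scaling[active_adapter]        # layer.py:565
            x = x.to(lora_A.weight.dtype)                 # layer.py:566
            if not self.use_dora[active_adapter]:         # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

layer = LoraLinearSketch(nn.Linear(16, 16))
print(layer(torch.randn(2, 16)).shape)  # torch.Size([2, 16])
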
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[25].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[25].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.norm_k, 140581770779648) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[25].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
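
The TENSOR_ALIASING entries just above appear twice verbatim in the original log and assert object identity rather than value equality: every LoRA-wrapped layer reaches the same _active_adapter object, so one alias check stands in for a per-layer content guard. A rough plain-Python illustration of the invariant being re-checked (not Dynamo internals; the class and variable names here are made up):

shared_active_adapter = ["default_0"]

class LayerStub:  # stand-in for a LoRA-wrapped Linear
    def __init__(self, active):
        self._active_adapter = active

first = LayerStub(shared_active_adapter)  # e.g. transformer_blocks[0].norm1.linear
later = LayerStub(shared_active_adapter)  # e.g. single_transformer_blocks[25].attn.to_v

assert first._active_adapter is later._active_adapter  # what the guard re-checks per call
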
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[25].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.norm_k.weight, 140581773247968) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.norm_q, 140581770779504) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[25].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[25].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.norm_q.weight, 140581765132224) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[25].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
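
norm_q and norm_k are RMSNorm modules: the EQUALS_MATCH pins eps == 1e-06 and the ID_MATCH on weight covers the "if self.weight is not None" branch, both cited at normalization.py:428-430. A hedged reconstruction of that forward, simplified from the cited lines (the exact diffusers dtype handling may differ):

import torch
import torch.nn as nn

class RMSNormSketch(nn.Module):
    def __init__(self, dim: int, eps: float = 1e-6, elementwise_affine: bool = True):
        super().__init__()
        self.eps = eps  # guarded with EQUALS_MATCH above
        self.weight = nn.Parameter(torch.ones(dim)) if elementwise_affine else None

    def forward(self, hidden_states):
        variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + self.eps)  # normalization.py:428
        if self.weight is not None:                                       # normalization.py:430
            hidden_states = hidden_states.to(self.weight.dtype) * self.weight
        return hidden_states

print(RMSNormSketch(128)(torch.randn(2, 4, 128)).shape)  # torch.Size([2, 4, 128])
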
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[25].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.processor, 140581770779408) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.forward, accessed_by=FuncDefaultsGuardAccessor
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm, 140581770779072) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[25].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.norm, 140581770779216) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.silu, 140581770779120) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
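
The block's norm is an adaptive LayerNorm: the guards trace emb = self.linear(self.silu(emb)) (normalization.py:169) and the shift/scale modulation at line 171, and the call site at transformer_flux.py:88 unpacks (norm_hidden_states, gate). A minimal sketch assuming the usual 3-way shift/scale/gate chunk; dimensions are placeholders and linear is LoRA-wrapped in the real model:

import torch
import torch.nn as nn

class AdaNormZeroSingleSketch(nn.Module):
    def __init__(self, dim: int):
        super().__init__()
        self.silu = nn.SiLU()
        self.linear = nn.Linear(dim, 3 * dim)
        self.norm = nn.LayerNorm(dim, elementwise_affine=False, eps=1e-6)

    def forward(self, x, emb):
        emb = self.linear(self.silu(emb))                                 # normalization.py:169
        shift_msa, scale_msa, gate_msa = emb.chunk(3, dim=1)
        x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # normalization.py:171
        return x, gate_msa  # "norm_hidden_states, gate = self.norm(hidden_states, emb=temb)"

norm = AdaNormZeroSingleSketch(64)
x, gate = norm(torch.randn(2, 10, 64), torch.randn(2, 64))
print(x.shape, gate.shape)  # torch.Size([2, 10, 64]) torch.Size([2, 64])
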
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.linear, 140533233878064) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[25].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.linear.lora_A, 140533233879120) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.linear.lora_A['default_0'], 140533234988896) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.linear.lora_A['default_0'].weight, 140533111538448) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.linear.lora_B, 140533233888960) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.linear.lora_B['default_0'], 140533234976992) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.linear.base_layer, 140581770779168) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.linear.lora_dropout, 140533233892608) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.linear.lora_dropout['default_0'], 140533233888384) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[25].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[25].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[25].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[25].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[25].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[25].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[25].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
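
The EQUALS_MATCH on scaling['default_0'] == 1.0 bakes the LoRA scaling factor into this compiled graph. In PEFT that factor is lora_alpha / r (or lora_alpha / sqrt(r) when use_rslora is set), so 1.0 is consistent with lora_alpha == r for this adapter. The concrete numbers below are illustrative, not read from the log:

r, lora_alpha = 16, 16
scaling = {"default_0": lora_alpha / r}
assert scaling["default_0"] == 1.0  # the value the guard re-checks on every call
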
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[25].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[25].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].act_mlp, 140581770779312) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_mlp, 140533234990480) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[25].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_mlp.lora_A, 140533234990000) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
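
act_mlp and proj_mlp form the single block's MLP branch guarded here (transformer_flux.py:89); the stretch of guards that follows repeats the lora_A/lora_B pattern because proj_mlp is LoRA-wrapped too. A toy-size sketch of the call path, assuming the usual gelu-tanh activation and a 4x MLP ratio (both placeholders, not read from the log):

import torch
import torch.nn as nn

dim, mlp_ratio = 64, 4.0
proj_mlp = nn.Linear(dim, int(dim * mlp_ratio))  # LoRA-wrapped in the real model
act_mlp = nn.GELU(approximate="tanh")

norm_hidden_states = torch.randn(2, 10, dim)
mlp_hidden_states = act_mlp(proj_mlp(norm_hidden_states))  # transformer_flux.py:89
print(mlp_hidden_states.shape)  # torch.Size([2, 10, 256])
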
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_mlp.lora_A['default_0'], 140533234981360) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_mlp.lora_A['default_0'].weight, 140533111552608) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_mlp.lora_B, 140533234987888) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_mlp.lora_B['default_0'], 140533234985872) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_mlp.base_layer, 140581770779264) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_mlp.lora_dropout, 140533234988320) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_mlp.lora_dropout['default_0'], 140533234986112) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[25].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[25].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[25].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in
forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[25].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[25].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[25].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[25].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[25].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[25].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_out, 140533234988752) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[25].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_out.lora_A, 140533234979248) # if active_adapter not 
in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_out.lora_A['default_0'], 140533234974976) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_out.lora_A['default_0'].weight, 140533111542368) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_out.lora_B, 140533234975600) # 
lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_out.lora_B['default_0'], 140533234980736) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_out.base_layer, 140581770779360) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_out.base_layer.training, 140591004393440) # result = 
self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_out.lora_dropout, 140533234976800) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_out.lora_dropout['default_0'], 140533234978528) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[25].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: 
len(L['self'].single_transformer_blocks[25].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[25].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[25].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[25].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[25].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[25].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[25].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[25].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[25].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[25].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[25]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26], accessed_by=GetItemGuardAccessor(26) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26], 140581770779024) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[26].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn, 140581770780224) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[26].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_k, 140533235556288) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ 
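The records above complete the bundle that repeats for every LoRA-wrapped Linear in the model: an ID_MATCH on the wrapper module and on each of base_layer, lora_A, lora_B and lora_dropout (including their 'default_0' entries and the lora_A weight), TYPE_MATCH + DICT_LENGTH + EQUALS_MATCH guards on the plain dicts scaling and use_dora, a LENGTH_CHECK that merged_adapters is empty, an ID_MATCH on _disable_adapters, and paired TENSOR_ALIASING guards asserting that _active_adapter is the very same object as L['self'].transformer_blocks[0].norm1.linear._active_adapter. Each guard corresponds to one attribute read on the peft forward path quoted in the trailing comments. Below is a condensed sketch of that path, reconstructed only from the peft/tuners/lora/layer.py lines the guards cite (557-568) — not the verbatim peft implementation, which has further branches for disabled, merged and DoRA adapters (those branches are what the tuners_utils.py:506/511/516 properties cited above feed into):

    # Condensed from the source lines cited in the guard comments; each
    # attribute read here maps to one of the guards in the dump above.
    def forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)      # layer.py:557 -> ID_MATCH on base_layer
        for active_adapter in self.active_adapters:       # tuners_utils.py:516 -> TENSOR_ALIASING on _active_adapter
            if active_adapter not in self.lora_A.keys():  # layer.py:560 -> ID_MATCH on lora_A
                continue
            lora_A = self.lora_A[active_adapter]          # layer.py:562 -> ID_MATCH on lora_A['default_0']
            lora_B = self.lora_B[active_adapter]          # layer.py:563 -> ID_MATCH on lora_B['default_0']
            dropout = self.lora_dropout[active_adapter]   # layer.py:564 -> ID_MATCH on lora_dropout['default_0']
            scaling = self.scaling[active_adapter]        # layer.py:565 -> TYPE/LEN/EQUALS guards on scaling
            x = x.to(lora_A.weight.dtype)                 # layer.py:566 -> ID_MATCH on the weight
            if not self.use_dora[active_adapter]:         # layer.py:568 -> TYPE/LEN/ID guards on use_dora
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Note the EQUALS_MATCH guards in particular: the adapter scale is specialized into the compiled graph as the Python constant 1.0, so invoking the model with a different lora_scale would fail that guard and force yet another recompile on top of the [0/3] compilation being guarded here. The dump now continues with the remaining guards for single_transformer_blocks[26].attn.to_k: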
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[26].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_k.lora_A, 140533235558784) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_k.lora_A['default_0'], 140533235549856) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[26].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_k.lora_A['default_0'].weight, 140526681364720) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_k.lora_B, 140533235552112) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_k.lora_B['default_0'], 140533235554704) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_k.base_layer, 140581770780368) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_k.lora_dropout, 140533235552256) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_k.lora_dropout.training, 140591004393408) # 
dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_k.lora_dropout['default_0'], 140533235551248) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[26].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[26].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[26].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[26].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] 
[0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[26].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[26].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[26].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[26].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[26].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_q, 140533235561328) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[26].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_q.lora_A, 140533235561136) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[26].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_q.lora_A['default_0'], 140533235555280) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_q.lora_A['default_0'].weight, 140526681361280) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_q.lora_B, 140533235557824) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_q.lora_B['default_0'], 140533235555184) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_q.base_layer, 140581770780464) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: 
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_q.lora_dropout, 140533235558160) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_q.lora_dropout['default_0'], 140533235557728) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[26].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[26].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[26].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[26].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[26].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[26].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[26].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[26].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[26].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_v, 140533235549136) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[26].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
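NOTE: the merged_adapters, _disable_adapters, and _active_adapter guards above come from the peft BaseTunerLayer properties quoted in the guard comments (peft/tuners/tuners_utils.py:506/511/516). A rough sketch of those properties, with the attribute layout inferred from the guards rather than copied from peft:

    class BaseTunerLayerSketch:
        # Inferred attribute layout; _active_adapter is assumed to be one
        # object shared by every tuned layer, which is why the check can be
        # collapsed into an identity comparison against the first layer
        # Dynamo saw (transformer_blocks[0].norm1.linear).
        def __init__(self, shared_active_adapter):
            self.merged_adapters = []                     # empty list -> TYPE_MATCH + LENGTH_CHECK
            self._disable_adapters = False                # ID_MATCH against the False singleton
            self._active_adapter = shared_active_adapter  # shared object -> TENSOR_ALIASING

        @property
        def merged(self) -> bool:
            return bool(self.merged_adapters)             # peft/tuners/tuners_utils.py:506

        @property
        def disable_adapters(self) -> bool:
            return self._disable_adapters                 # peft/tuners/tuners_utils.py:511

        @property
        def active_adapter(self):
            return self._active_adapter                   # peft/tuners/tuners_utils.py:516

Despite the TENSOR_ALIASING label, the guarded value here is not a tensor; as the log text itself shows, the guard reduces to an `is` identity check between the two _active_adapter attributes.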
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_v.lora_A, 140533235550480) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_v.lora_A['default_0'], 140533235858352) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_v.lora_A['default_0'].weight, 140526681362640) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_v.lora_B, 140533235548656) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_v.lora_B['default_0'], 140533235849712) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_v.base_layer, 140581770780512) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_v.lora_dropout, 140533235555904) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_v.lora_dropout['default_0'], 140533235553552) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[26].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[26].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[26].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[26].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[26].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[26].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[26].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[26].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[26].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.norm_k, 140581770780416) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
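NOTE: the to_v subtree is a near-verbatim repeat of the to_q subtree with different object ids. In plain Python, the checks amount to roughly the following (illustrative only; the real predicates are compiled into the TREE_GUARD_MANAGER by torch/_dynamo/guards.py and evaluated natively; `model` is assumed to be the LoRA-patched FluxTransformer2DModel this log came from):

    def recheck_guards(model) -> bool:
        # Plain-Python restatement of the guard kinds seen above.
        lora = model.single_transformer_blocks[26].attn.to_v
        return all([
            type(lora.scaling) is dict,            # TYPE_MATCH via ___check_type_id
            len(lora.scaling) == 1,                # DICT_LENGTH
            lora.scaling["default_0"] == 1.0,      # EQUALS_MATCH
            not lora.merged_adapters,              # LENGTH_CHECK (list still empty)
            lora._disable_adapters is False,       # ID_MATCH against the False singleton
            "forward" not in lora.__dict__,        # DICT_CONTAINS (forward not monkey-patched)
            model.transformer_blocks[0].norm1.linear._active_adapter
            is lora._active_adapter,               # TENSOR_ALIASING (identity check)
        ])

Note also that every `.training` guard in this log reuses one of two object ids (140591004393408 / 140591004393440), which is consistent with ID_MATCH checks against the interned True/False singletons; flipping any module between train() and eval() would therefore invalidate the compiled entry.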
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[26].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[26].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.norm_k.weight, 140581783341632) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.norm_q, 140581770780272) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[26].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[26].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.norm_q.weight, 140581773250448) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
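NOTE: the norm_q/norm_k guards pin the query/key RMSNorm modules: eps by value (EQUALS_MATCH == 1e-06, because it is baked into the traced rsqrt) and weight by object id. A minimal sketch of that norm, assembled from the statements quoted in the guard comments (diffusers/src/diffusers/models/normalization.py:428 and :430); the variance reduction line is the standard RMSNorm formula and is assumed here, not quoted in the log:

    import torch
    import torch.nn as nn

    class RMSNormSketch(nn.Module):
        # Simplified stand-in for the diffusers RMSNorm behind attn.norm_q/norm_k.
        def __init__(self, dim: int, eps: float = 1e-6, elementwise_affine: bool = True):
            super().__init__()
            self.eps = eps  # guarded above by EQUALS_MATCH: eps == 1e-06
            self.weight = nn.Parameter(torch.ones(dim)) if elementwise_affine else None

        def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
            # assumed standard RMS reduction, computed in float32
            variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)
            hidden_states = hidden_states * torch.rsqrt(variance + self.eps)  # :428
            if self.weight is not None:                                       # :430
                hidden_states = hidden_states.to(self.weight.dtype) * self.weight
            return hidden_states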
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[26].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[26].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.processor, 140581770780176) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.forward, accessed_by=FuncDefaultsGuardAccessor
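NOTE: attn.heads is guarded by value (EQUALS_MATCH == 24) because head_dim is derived from it at attention_processor.py:1721 and baked into the compiled graph's reshapes. An illustrative restatement of those two quoted lines, with the tensor sizes assumed (a typical Flux hidden size of 3072 is used here; the shapes are not in the log):

    import torch

    batch_size, seq_len, inner_dim = 1, 4608, 3072   # assumed shapes, not from the log
    heads = 24                                       # guarded: EQUALS_MATCH above
    head_dim = inner_dim // heads                    # attention_processor.py:1721 -> 128
    hidden_states = torch.randn(batch_size, seq_len, inner_dim)
    # typical multi-head reshape this feeds into (assumed, not quoted in the log):
    query = hidden_states.view(batch_size, -1, heads, head_dim).transpose(1, 2)  # (B, 24, S, 128)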
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm, 140581770779840) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[26].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.norm, 140581770779984) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.silu, 140581770779888) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.linear, 140533235385392) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[26].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.linear.lora_A, 140533235386448) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.linear.lora_A['default_0'], 140533235388416) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.linear.lora_A['default_0'].weight, 140526694843392) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.linear.lora_B, 140533235386256) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.linear.lora_B['default_0'], 140533235388560) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.linear.base_layer, 140581770779936) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.linear.lora_dropout, 140533235388464) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.linear.lora_dropout['default_0'], 140533235388080) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[26].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[26].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[26].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[26].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[26].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[26].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[26].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[26].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[26].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | |
| | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].act_mlp, 140581770780080) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_mlp, 140533235389856) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[26].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_mlp.lora_A, 140533235384576) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_mlp.lora_A['default_0'], 140533235400320) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_mlp.lora_A['default_0'].weight, 140526694846832) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_mlp.lora_B, 140533235384384) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_mlp.lora_B['default_0'], 140533235393120) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_mlp.base_layer, 140581770780032) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_mlp.lora_dropout, 140533235400560) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_mlp.lora_dropout['default_0'], 140533235389232) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # 
peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[26].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[26].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[26].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[26].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[26].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[26].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[26].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[26].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[26].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_out, 140533218817120) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[26].proj_out.__dict__) # forward_call = 
(self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_out.lora_A, 140533235551056) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_out.lora_A['default_0'], 140533235553360) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[26].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_out.lora_A['default_0'].weight, 140526694848592) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_out.lora_B, 140533235558688) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_out.lora_B['default_0'], 140533235560656) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_out.base_layer, 140581770780128) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_out.lora_dropout, 140533235554464) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_out.lora_dropout['default_0'], 140533235556144) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[26].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[26].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[26].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[26].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[26].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[26].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[26].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[26].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[26].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[26].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[26]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27], accessed_by=GetItemGuardAccessor(27) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27], 140581770779792) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[27].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn, 140581770780992) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[27].attn.__dict__) # forward_call = 
(self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_k, 140533235392208) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[27].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_k.lora_A, 140533236790272) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[27].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_k.lora_A['default_0'], 140533236787872) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_k.lora_A['default_0'].weight, 140537653407920) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_k.lora_B, 140533236791568) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_k.lora_B['default_0'], 140533236788304) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_k.base_layer, 140581770781136) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_k.base_layer.training, 140591004393440) # result = 
self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_k.lora_dropout, 140533236792192) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_k.lora_dropout['default_0'], 140533236791280) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[27].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[27].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[27].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[27].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[27].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[27].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[27].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged 
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[27].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[27].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_q, 140533235845968) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[27].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_q.lora_A, 140533235846256) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_q.lora_A['default_0'], 140533235854176) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_q.lora_A['default_0'].weight, 140537653404160) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_q.lora_B, 140533235846976) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_q.lora_B['default_0'], 140533235853456) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.base_layer, 
accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_q.base_layer, 140581770781232) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_q.lora_dropout, 140533235854416) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_q.lora_dropout['default_0'], 140533235847024) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | 
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[27].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[27].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[27].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[27].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[27].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q._forward_hooks, 
accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[27].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[27].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[27].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[27].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_v, 140533236781008) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[27].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_v.lora_A, 140533236784416) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_v.lora_A['default_0'], 140533236782544) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_v.lora_A['default_0'].weight, 140537641695840) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_v.lora_B, 140533236784752) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_v.lora_B['default_0'], 140533236778224) # 
lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_v.base_layer, 140581770781280) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_v.lora_dropout, 140533236784176) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_v.lora_dropout['default_0'], 140533236780528) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[27].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[27].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[27].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[27].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # 
peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[27].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[27].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[27].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.to_v._active_adapter, 
accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[27].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[27].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.norm_k, 140581770781184) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[27].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[27].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.norm_k.weight, 140581773250208) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.norm_q, 140581770781040) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[27].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[27].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # 
diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.norm_q.weight, 140581771022304) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[27].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[27].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.processor, 140581770780944) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn._forward_hooks, 
accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm, 140581770780608) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[27].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # 
diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.norm, 140581770780752) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.silu, 140581770780656) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.linear, 140533235854464) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[27].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.linear.lora_A, 140533235855760) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.linear.lora_A['default_0'], 140533235854656) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.linear.lora_A['default_0'].weight, 140526681373440) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.linear.lora_B, 140533235858640) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.linear.lora_B['default_0'], 140533235851968) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.linear.base_layer, 140581770780704) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.linear.lora_dropout, 140533235850816) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.linear.lora_dropout['default_0'], 140533235856432) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[27].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[27].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[27].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[27].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[27].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[27].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[27].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[27].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[27].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
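The subtree above is the per-layer pattern that repeats for every LoRA-wrapped Linear in the model: Dynamo installs one guard for each attribute that PEFT's forward reads (the lora_A/lora_B module dicts and their weights, lora_dropout, scaling, use_dora, merged_adapters, _disable_adapters, _active_adapter), plus the nn.Module bookkeeping dicts, so a single wrapped Linear contributes roughly thirty guards. A minimal sketch of the guarded code path, reconstructed only from the source lines quoted in the guard comments (peft/tuners/lora/layer.py:557-568); the loop header and the final update are paraphrased from the PEFT LoRA Linear.forward structure, not quoted in this log:

    # Sketch, not the verbatim PEFT implementation; line markers refer to the
    # peft/tuners/lora/layer.py locations quoted in the guard comments above.
    def lora_linear_forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)       # layer.py:557
        for active_adapter in self.active_adapters:        # paraphrased
            if active_adapter not in self.lora_A.keys():   # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]           # layer.py:562
            lora_B = self.lora_B[active_adapter]           # layer.py:563
            dropout = self.lora_dropout[active_adapter]    # layer.py:564
            scaling = self.scaling[active_adapter]         # layer.py:565
            x = x.to(lora_A.weight.dtype)                  # layer.py:566
            if not self.use_dora[active_adapter]:          # layer.py:568
                # low-rank update; the exact expression is paraphrased
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Every attribute access in this function corresponds to one GuardManager node above. The tree then continues with the same block's act_mlp and proj_mlp submodules: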
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].act_mlp, 140581770780848) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_mlp, 140533235854080) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[27].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_mlp.lora_A, 140533235850528) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_mlp.lora_A['default_0'], 140533235844192) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_mlp.lora_A['default_0'].weight, 140526680337568) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_mlp.lora_B, 140533235852448) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_mlp.lora_B['default_0'], 140533235846640) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_mlp.base_layer, 140581770780800) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_mlp.lora_dropout, 140533235850432) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_mlp.lora_dropout['default_0'], 140533235853024) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[27].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[27].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[27].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[27].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[27].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[27].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[27].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[27].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[27].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
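Reading the guard kinds in this dump: ID_MATCH pins an attribute to an exact Python object (___check_obj_id is an identity comparison against the id() recorded at compile time), TYPE_MATCH pins the exact type, EQUALS_MATCH compares by value, DICT_LENGTH and LENGTH_CHECK assert container sizes or emptiness, and the negated DICT_CONTAINS asserts that no instance-level 'forward' shadows the class method. Roughly, in Python terms (a sketch; the real checks are fast accessor chains installed by torch/_dynamo/guards.py, not this literal code, and the integer constants are the ids captured when the frame was compiled):

    # `linear` stands for one guarded module, e.g.
    # self.single_transformer_blocks[27].proj_mlp.
    def guards_still_valid(linear) -> bool:
        return (
            id(linear.lora_A) == 140533235850528      # ID_MATCH: same object
            and type(linear.scaling) is dict          # TYPE_MATCH: exact type id
            and len(linear.scaling) == 1              # DICT_LENGTH
            and linear.scaling["default_0"] == 1.0    # EQUALS_MATCH: scale baked in
            and not linear.merged_adapters            # LENGTH_CHECK: nothing merged
            and "forward" not in linear.__dict__      # DICT_CONTAINS (negated)
        )

One practical consequence visible here: the LoRA scale is guarded by value (scaling['default_0'] == 1.0), so running the same compiled model with a different effective scale fails that guard and triggers a recompile, and merging adapters flips merged_adapters from empty to non-empty with the same effect. Dumps like this one are produced with TORCH_LOGS="guards" (equivalently, torch._logging.set_logs(guards=True)). The dump continues with the block's proj_out projection: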
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_out, 140533235850480) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[27].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_out.lora_A, 140533235849616) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_out.lora_A['default_0'], 140533235850000) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_out.lora_A['default_0'].weight, 140526680336208) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_out.lora_B, 140533235847648) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_out.lora_B['default_0'], 140533235853744) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_out.base_layer, 140581770780896) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_out.lora_dropout, 140533235847696) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_out.lora_dropout['default_0'], 140533235845248) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[27].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[27].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[27].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[27].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[27].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[27].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[27].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[27].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[27].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[27].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[27]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
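That closes the subtree for single_transformer_blocks[27]; the tree then opens an identical one for single_transformer_blocks[28], so the total guard count grows linearly with the number of blocks. The paired TENSOR_ALIASING entries are aliasing guards: PEFT shares a single _active_adapter object across all tuner layers, and the guard asserts with an identity (`is`) check that the object seen at transformer_blocks[0].norm1.linear is still the very same object seen at each later layer. The block body these guards protect, assembled around the diffusers lines quoted in the comments (transformer_flux.py:88-98 and normalization.py:169/171), is roughly the following; the statement at :88 and all unmarked glue lines are paraphrased from the diffusers FluxSingleTransformerBlock, not quoted in this log:

    import torch

    # Sketch only; tensor shapes, the attn kwargs, and the residual/concat
    # glue are assumptions, with ":<line>" marking statements the log quotes.
    def single_block_forward(self, hidden_states, temb, image_rotary_emb=None):
        residual = hidden_states
        # self.norm is AdaLayerNormZeroSingle; internally (normalization.py):
        #   emb = self.linear(self.silu(emb))                                 # :169
        #   x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # :171
        norm_hidden_states, gate = self.norm(hidden_states, emb=temb)         # :88
        mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states))   # :89
        attn_output = self.attn(                                              # :91
            hidden_states=norm_hidden_states,
            image_rotary_emb=image_rotary_emb,
        )
        hidden_states = torch.cat([attn_output, mlp_hidden_states], dim=2)
        gate = gate.unsqueeze(1)
        hidden_states = gate * self.proj_out(hidden_states)                   # :98
        return residual + hidden_states

The dump then opens the same subtree for the next block: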
___check_obj_id(L['self'].single_transformer_blocks[28].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn, 140581770781760) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[28].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_k, 140533236913904) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[28].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_k.lora_A, 140533236910688) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_k.lora_A['default_0'], 140533236911840) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 
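
Taken together, the per-attribute guards on each lora_* module mirror the PEFT forward path line by line. Reassembling just the source lines quoted in the guard comments (peft/tuners/lora/layer.py:557-568) gives roughly the sketch below; the final update line is the standard LoRA formulation and is assumed rather than quoted in this log:

    # Sketch of peft.tuners.lora.layer.Linear.forward, stitched from the
    # lines the guards quote; not the verbatim peft source.
    def forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)      # layer.py:557
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():  # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]          # layer.py:562
            lora_B = self.lora_B[active_adapter]          # layer.py:563
            dropout = self.lora_dropout[active_adapter]   # layer.py:564
            scaling = self.scaling[active_adapter]        # layer.py:565
            x = x.to(lora_A.weight.dtype)                 # layer.py:566
            if not self.use_dora[active_adapter]:         # layer.py:568
                # assumed: the plain (non-DoRA) LoRA update
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Each name read in that body (lora_A, lora_B, lora_dropout, scaling, use_dora, plus the merged and disable_adapters properties from tuners_utils.py) is what turns one wrapped Linear into the dozen-plus guards seen here.
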
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_k.lora_A['default_0'].weight, 140526787384784) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_k.lora_B, 140533236914336) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_k.lora_B['default_0'], 140533236910064) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = 
self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_k.base_layer, 140581770781904) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_k.lora_dropout, 140533236909104) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_k.lora_dropout['default_0'], 140533236912368) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | 
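
ID_MATCH / ___check_obj_id pins object identity, not value: even the adapter weight above is guarded by its id(). A hypothetical equivalent, with `transformer` standing in for L['self'] and the integer taken from the guard on to_k.lora_A['default_0'].weight above:

    # What the weight ID_MATCH amounts to; the constant is the CPython
    # object id baked in at compile time.
    to_k = transformer.single_transformer_blocks[28].attn.to_k
    assert id(to_k.lora_A["default_0"].weight) == 140526787384784

Consequently, assigning a fresh Parameter to such a slot invalidates the guard set, while an in-place weight.data.copy_(...) preserves the id; swapping LoRA checkpoints under torch.compile presumably has to mutate in place to avoid recompiles.
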
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[28].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[28].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[28].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[28].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[28].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # 
peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[28].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[28].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[28].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[28].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- 
GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_q, 140533236916688) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[28].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_q.lora_A, 140533236916592) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | 
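
The paired TENSOR_ALIASING entries just above are identity guards across modules: every LoRA layer's _active_adapter must be the very same object as the one on transformer_blocks[0].norm1.linear. Roughly, with `transformer` again standing in for L['self']:

    # Hypothetical expansion of the TENSOR_ALIASING guards: all LoRA layers
    # are expected to share one _active_adapter object, checked with `is`.
    ref = transformer.transformer_blocks[0].norm1.linear._active_adapter
    assert transformer.single_transformer_blocks[28].attn.to_k._active_adapter is ref

Replacing the list on any one layer, instead of mutating the shared object, would presumably break the identity check and force a recompile.
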
| | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_q.lora_A['default_0'], 140533236908864) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_q.lora_A['default_0'].weight, 140537326900208) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_q.lora_B, 140533236913664) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 
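
The recurring "DICT_CONTAINS: not ___dict_contains('forward', ...)" guards come from the quoted nn/modules/module.py:1556 dispatch: _call_impl resolves self.forward, so Dynamo pins the absence of a per-instance override. An illustration of what would flip the guard, using to_q as a stand-in:

    # Illustration only: a per-instance `forward` lands in __dict__ and
    # flips the DICT_CONTAINS guard, forcing a recompile on the next call.
    to_q = transformer.single_transformer_blocks[28].attn.to_q
    assert "forward" not in to_q.__dict__   # the state the guard pins today
    to_q.forward = lambda x: x              # hypothetical override
    assert "forward" in to_q.__dict__       # the guard would now fail
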
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_q.lora_B['default_0'], 140533236909296) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_q.base_layer, 140581770782000) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_q.lora_dropout, 140533236914528) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[28].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_q.lora_dropout['default_0'], 140533236917360) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[28].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[28].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[28].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 
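
Unlike the identity guards, EQUALS_MATCH checks a value: scaling['default_0'] == 1.0. In PEFT the per-adapter scaling is derived from the config, typically lora_alpha / r (or lora_alpha / sqrt(r) under rslora), so 1.0 indicates this adapter was trained with lora_alpha == r. A sketch with hypothetical config values:

    # How the guarded scaling value is derived in PEFT (sketch); r and
    # lora_alpha are hypothetical, chosen to reproduce the guarded 1.0.
    r, lora_alpha, use_rslora = 16, 16, False
    scaling = lora_alpha / (r ** 0.5) if use_rslora else lora_alpha / r
    assert scaling == 1.0  # matches the EQUALS_MATCH above
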
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[28].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[28].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[28].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[28].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q._backward_pre_hooks, 
accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[28].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[28].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_v, 140533238103440) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[28].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_v.lora_A, 140533238101232) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 
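
Per attention module the same to_q/to_k/to_v pattern repeats; the quoted diffusers lines reassemble into the processor's projection-plus-QK-norm prologue. A sketch stitched from the lines quoted in these guards (attention_processor.py:1716-1729), not the verbatim diffusers source; the bodies of the two if-branches are assumed:

    query = attn.to_q(hidden_states)   # attention_processor.py:1716
    key = attn.to_k(hidden_states)     # attention_processor.py:1717
    value = attn.to_v(hidden_states)   # attention_processor.py:1718

    if attn.norm_q is not None:        # attention_processor.py:1727
        query = attn.norm_q(query)     # assumed
    if attn.norm_k is not None:        # attention_processor.py:1729
        key = attn.norm_k(key)         # assumed
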
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_v.lora_A['default_0'], 140533238096720) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_v.lora_A['default_0'].weight, 140526787390624) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_v.lora_B, 140533238088560) # lora_B = 
self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_v.lora_B['default_0'], 140533238098784) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_v.base_layer, 140581770782048) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_v.lora_dropout, 140533238102432) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_v.lora_dropout['default_0'], 140533238097968) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[28].attn.to_v.scaling, 140591004466944) # scaling = 
self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[28].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[28].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[28].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[28].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[28].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not 
L['self'].single_transformer_blocks[28].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[28].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[28].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.norm_k, 140581770781952) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[28].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_k.training, 
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.norm_k, 140581770781952) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[28].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[28].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.norm_k.weight, 140581765880688) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.norm_q, 140581770781808) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[28].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[28].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.norm_q.weight, 140581783345232) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[28].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[28].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.processor, 140581770781712) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.forward, accessed_by=FuncDefaultsGuardAccessor
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
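The norm_k and norm_q subtrees pin eps to 1e-06 with EQUALS_MATCH and take the `weight is not None` branch, both anchored to diffusers/src/diffusers/models/normalization.py:428-430. A sketch of the RMSNorm forward those quoted lines belong to; only the rsqrt and weight lines are quoted in the guards, and the variance computation is an assumption based on the standard RMSNorm formulation:

    import torch

    def rms_norm(hidden_states, weight=None, eps=1e-6):
        # assumed: mean of squares over the last dimension
        variance = hidden_states.pow(2).mean(-1, keepdim=True)
        # quoted at normalization.py:428 in the guards above
        hidden_states = hidden_states * torch.rsqrt(variance + eps)
        # quoted at normalization.py:430; the affine weight is optional
        if weight is not None:
            hidden_states = hidden_states * weight
        return hidden_states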
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm, 140581770781376) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[28].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.norm, 140581770781520) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.silu, 140581770781424) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.linear, 140533236778128) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[28].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.linear.lora_A, 140533236779808) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.linear.lora_A['default_0'], 140533236781728) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.linear.lora_A['default_0'].weight, 140537641692960) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.linear.lora_B, 140533236780192) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.linear.lora_B['default_0'], 140533236781152) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.linear.base_layer, 140581770781472) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.linear.lora_dropout, 140533236777216) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.linear.lora_dropout['default_0'], 140533236779376) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[28].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[28].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[28].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[28].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[28].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[28].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[28].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[28].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[28].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
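Every LoRA-wrapped Linear in this dump (attn.to_v above, norm.linear here, proj_mlp and proj_out below) emits the same block of roughly thirty guards, all anchored to peft/tuners/lora/layer.py:557-568. Reassembling the quoted source lines gives the forward path being guarded; everything below except the final `result` update is quoted verbatim in the guard comments, and that last line is an assumption about how PEFT composes the adapter delta:

    # reassembled from the guard comments; `self` is a PEFT LoRA-wrapped Linear
    result = self.base_layer(x, *args, **kwargs)      # layer.py:557
    for active_adapter in self.active_adapters:
        if active_adapter not in self.lora_A.keys():  # layer.py:560
            continue
        lora_A = self.lora_A[active_adapter]          # layer.py:562
        lora_B = self.lora_B[active_adapter]          # layer.py:563
        dropout = self.lora_dropout[active_adapter]   # layer.py:564
        scaling = self.scaling[active_adapter]        # layer.py:565
        x = x.to(lora_A.weight.dtype)                 # layer.py:566
        if not self.use_dora[active_adapter]:         # layer.py:568
            # assumed composition of the adapter delta (not quoted in the log)
            result = result + lora_B(lora_A(dropout(x))) * scaling

The dict lookups explain the guard mix: ID_MATCH pins each ModuleDict and submodule, TYPE_MATCH plus DICT_LENGTH pin the plain dicts (scaling, use_dora), and LENGTH_CHECK verifies merged_adapters is empty so the unmerged code path stays valid.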
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].act_mlp, 140581770781616) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_mlp, 140533236782112) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[28].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_mlp.lora_A, 140533236781344) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_mlp.lora_A['default_0'], 140533236916400) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_mlp.lora_A['default_0'].weight, 140537641683120) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_mlp.lora_B, 140533236922496) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_mlp.lora_B['default_0'], 140533236922256) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_mlp.base_layer, 140581770781568) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_mlp.lora_dropout, 140533236787776) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_mlp.lora_dropout['default_0'], 140533236789312) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[28].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[28].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[28].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[28].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[28].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[28].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[28].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[28].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[28].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
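Note that scaling['default_0'] is guarded with EQUALS_MATCH against the literal 1.0 on every adapted layer, so any call that changes the effective LoRA scale mutates those floats and invalidates the whole frame; the [0/3] tag on these records already marks the fourth compiled version of frame 0. A hypothetical repro, assuming `pipe` is a diffusers FluxPipeline with this LoRA loaded (the kwarg plumbing is illustrative, not taken from the log):

    import torch

    pipe.transformer = torch.compile(pipe.transformer)
    # first call compiles and installs EQUALS_MATCH ... scaling['default_0'] == 1.0
    image = pipe(prompt, joint_attention_kwargs={"scale": 1.0}).images[0]
    # a different scale rewrites every scaling dict -> guard failure -> recompile
    image = pipe(prompt, joint_attention_kwargs={"scale": 0.5}).images[0]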
diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_out.lora_A, 140533236918608) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_out.lora_A['default_0'], 140533236924320) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.lora_A['default_0'].weight, 
accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_out.lora_A['default_0'].weight, 140537641693200) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_out.lora_B, 140533236921152) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_out.lora_B['default_0'], 140533236917504) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_out.base_layer, 140581770781664) # result = 
self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_out.lora_dropout, 140533236922160) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_out.lora_dropout['default_0'], 140533236920432) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
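(Annotation: the ID_MATCH, TYPE_MATCH and DICT_CONTAINS entries in this dump are identity/containment predicates that Dynamo re-evaluates on every call of the compiled region. Their helpers are not printed in the log; a minimal sketch of their semantics, assuming CPython id() behavior -- illustrative only, not torch._dynamo's actual guard code:

    def id_match(obj, expected_id: int) -> bool:
        # ID_MATCH / ___check_obj_id: the exact same Python object must
        # still be reachable at this source path
        return id(obj) == expected_id

    def type_match(obj, expected_type_id: int) -> bool:
        # TYPE_MATCH / ___check_type_id: only the type identity is pinned,
        # not the value
        return id(type(obj)) == expected_type_id

    def dict_contains(key: str, d: dict) -> bool:
        # DICT_CONTAINS: used negated above, e.g. to assert no
        # instance-level 'forward' override shadows the class method
        return key in d

Because these pin object ids, swapping any of the modules -- for instance re-loading a LoRA adapter, which creates fresh lora_A/lora_B objects -- changes the ids, fails the guard, and forces a recompile.)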
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[28].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[28].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[28].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[28].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[28].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[28].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[28].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[28].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[28].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[28].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[28]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
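(Annotation: the guard comments above all point into the PEFT LoRA Linear forward, and every attribute and dict lookup on that path earns its own guard, repeated for each LoRA-wrapped projection (proj_mlp, proj_out, to_q/to_k/to_v, ...). A simplified sketch of the guarded code path, reconstructed from the peft/tuners/lora/layer.py lines quoted in this log; the final update on the non-DoRA branch is paraphrased from PEFT source and is an assumption here, not shown in the log:

    def lora_linear_forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)       # layer.py:557
        for active_adapter in self.active_adapters:        # -> tuners_utils.py:516
            if active_adapter not in self.lora_A.keys():   # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]           # layer.py:562
            lora_B = self.lora_B[active_adapter]           # layer.py:563
            dropout = self.lora_dropout[active_adapter]    # layer.py:564
            scaling = self.scaling[active_adapter]         # layer.py:565
            x = x.to(lora_A.weight.dtype)                  # layer.py:566
            if not self.use_dora[active_adapter]:          # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

This is why a single compiled Flux forward produces thousands of guard lines: roughly a dozen guards per LoRA layer, multiplied by every wrapped Linear across all transformer_blocks and single_transformer_blocks.)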
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29], accessed_by=GetItemGuardAccessor(29)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29], 140581770781328) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[29].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn, 140581770782528) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[29].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_k, 140533237336336) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[29].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_k.lora_A, 140533237338016) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_k.lora_A['default_0'], 140533237338352) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_k.lora_A['default_0'].weight, 140526664989840) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_k.lora_B, 140533237337824) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_k.lora_B['default_0'], 140533237336816) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_k.base_layer, 140581770782672) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_k.lora_dropout, 140533237342576) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_k.lora_dropout['default_0'], 140533237337584) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[29].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[29].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[29].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[29].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[29].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[29].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[29].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[29].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[29].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
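(Annotation: the TENSOR_ALIASING entries assert object identity between two sources -- every LoRA layer's _active_adapter must still be the very same object as transformer_blocks[0].norm1.linear's, so one cheap identity check replaces a structural comparison. Each guard is printed twice here, presumably once per access site that installed it. Illustrative sketch of the predicate only:

    def aliasing_guard(lhs, rhs) -> bool:
        # TENSOR_ALIASING: both sources must still reference the single
        # object recorded at compile time; `is` compares identity, not value.
        return lhs is rhs

Replacing the shared _active_adapter object on any layer (rather than mutating it in place) would break the aliasing and trigger a recompile.)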
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_q, 140533237337056) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[29].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_q.lora_A, 140533237346752) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_q.lora_A['default_0'], 140533237346560) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_q.lora_A['default_0'].weight, 140526664975600) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_q.lora_B, 140533237342768) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_q.lora_B['default_0'], 140533237342480) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_q.base_layer, 140581770782768) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_q.lora_dropout, 140533237347664) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_q.lora_dropout['default_0'], 140533237344880) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[29].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[29].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[29].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[29].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[29].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[29].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[29].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[29].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[29].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
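(Annotation: unlike the identity guards, the EQUALS_MATCH on scaling['default_0'] pins an actual value (1.0 here). In PEFT the per-adapter scale is lora_alpha / r when use_rslora is off, so 1.0 suggests this adapter was configured with lora_alpha == r. A hypothetical config that would reproduce the logged value:

    from peft import LoraConfig
    config = LoraConfig(r=16, lora_alpha=16)  # scaling = 16 / 16 == 1.0

A practical consequence: running the pipeline with a different lora_scale rescales these scaling entries in place via scale_lora_layers, which would fail the EQUALS_MATCH and force a recompile of the whole region.)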
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_v, 140533237335088) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[29].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_v.lora_A, 140533237334320) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_v.lora_A['default_0'], 140533237348576) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_v.lora_A['default_0'].weight, 140526659357888) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_v.lora_B, 140533237336144) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_v.lora_B['default_0'], 140533237343248) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_v.base_layer, 140581770782816) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_v.lora_dropout, 140533237338496) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | 
+- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_v.lora_dropout['default_0'], 140533237335712) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[29].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[29].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[29].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[29].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[29].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.use_dora['default_0'], 
accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[29].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[29].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[29].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: 
L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[29].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.norm_k, 140581770782720) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[29].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[29].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.norm_k.weight, 140581771023984) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.norm_q, 140581770782576) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[29].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[29].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.norm_q.weight, 140581783346352) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[29].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[29].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.processor, 140581770782480) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm, 140581770782144) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[29].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | 
| | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.norm, 140581770782288) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.silu, 140581770782192) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.linear, 140533238097872) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[29].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.linear.lora_A, 140533238102144) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.linear.lora_A['default_0'], 140533238090672) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = 
self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.linear.lora_A['default_0'].weight, 140526787380704) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.linear.lora_B, 140533238094560) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.linear.lora_B['default_0'], 140533238090432) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[29].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.linear.base_layer, 140581770782240) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.linear.lora_dropout, 140533238100752) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.linear.lora_dropout['default_0'], 140533238095376) # dropout = 
self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[29].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[29].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[29].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[29].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[29].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | 
| | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[29].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[29].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[29].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].single_transformer_blocks[29].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].act_mlp, 140581770782384) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_mlp, 140533238092352) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[29].proj_mlp.__dict__) # 
forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_mlp.lora_A, 140533238095616) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_mlp.lora_A['default_0'], 140533238091632) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_mlp.lora_A['default_0'].weight, 140526787386224) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_mlp.lora_B, 140533238088752) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_mlp.lora_B['default_0'], 140533238090480) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_mlp.base_layer, 140581770782336) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_mlp.lora_dropout, 140533238092544) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_mlp.lora_dropout['default_0'], 140533238089328) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[29].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[29].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[29].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[29].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[29].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[29].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[29].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[29].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[29].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
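
The guard bundle above tracks, attribute by attribute, the path PEFT takes through its LoRA Linear layer: every `self.<attr>` access Dynamo traced in peft/tuners/lora/layer.py becomes one guard. Below is a minimal sketch of that forward path, paraphrased from the source lines the guard comments quote (557-568) and simplified to the single-adapter, non-DoRA case actually guarded here (use_dora is False, merged_adapters is empty); it is an illustration of the traced path, not the verbatim PEFT source:

    # Sketch of the LoRA Linear forward that produced the guards above
    # (paraphrased from the peft/tuners/lora/layer.py lines quoted in the
    # guard comments; single adapter, non-DoRA case).
    def forward(self, x, *args, **kwargs):
        result = self.base_layer(x, *args, **kwargs)      # line 557 -> ID_MATCH on base_layer
        for active_adapter in self.active_adapters:       # -> TENSOR_ALIASING on _active_adapter
            if active_adapter not in self.lora_A.keys():  # line 560 -> ID_MATCH on lora_A
                continue
            lora_A = self.lora_A[active_adapter]          # line 562 -> ID_MATCH on lora_A['default_0']
            lora_B = self.lora_B[active_adapter]          # line 563 -> ID_MATCH on lora_B['default_0']
            dropout = self.lora_dropout[active_adapter]   # line 564 -> ID_MATCH on lora_dropout['default_0']
            scaling = self.scaling[active_adapter]        # line 565 -> TYPE_MATCH/DICT_LENGTH/EQUALS_MATCH
            x = x.to(lora_A.weight.dtype)                 # line 566 -> ID_MATCH on the weight object
            if not self.use_dora[active_adapter]:         # line 568 -> ID_MATCH on use_dora['default_0']
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Because scaling['default_0'] is a plain Python float, its value is burned into the compiled graph and pinned by the EQUALS_MATCH == 1.0 guard above; running the same model with a different effective LoRA scale would be expected to miss this guard and trigger a recompile.
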
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_out, 140533238089568) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[29].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_out.lora_A, 140533238090336) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_out.lora_A['default_0'], 140533237346800) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_out.lora_A['default_0'].weight, 140526787384544) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_out.lora_B, 140533237345552) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_out.lora_B['default_0'], 140533237349152) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_out.base_layer, 140581770782432) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_out.lora_dropout, 140533238095808) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_out.lora_dropout['default_0'], 140533238103536) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[29].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[29].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[29].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[29].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[29].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[29].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[29].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[29].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[29].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[29].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
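
For reading the dump: the helpers named in these entries check object identity and value, not structure. A rough sketch of their semantics follows, assuming the usual closure-variable definitions in torch/_dynamo/guards.py (the large integers are id() values captured in this process, so they are only meaningful within this run); this is an illustration, not the actual torch source:

    # Rough semantics of the guard helpers named in this dump
    # (assumed from torch/_dynamo/guards.py; illustrative only).
    def ___check_obj_id(obj, expected):    # ID_MATCH
        return id(obj) == expected         # identity, not equality

    def ___check_type_id(obj, expected):   # TYPE_MATCH
        return id(type(obj)) == expected

    def ___dict_contains(key, d):          # DICT_CONTAINS
        return key in d

    # EQUALS_MATCH compares by value (the "== 1.0" on scaling above);
    # DICT_LENGTH and LENGTH_CHECK assert len(...) on the scaling,
    # use_dora, and merged_adapters containers.

Because ID_MATCH is identity-based, it fails as soon as a module or weight object is replaced, even by an identical copy, which is why rebinding attributes behaves differently from mutating them in place.
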
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[29]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
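
This closes single_transformer_blocks[29]; the dump then opens an essentially identical bundle for single_transformer_blocks[30]. The per-layer pattern repeats for every LoRA-wrapped Linear in the model, so the total guard count scales with the number of wrapped modules. A small sketch for gauging that number, assuming the PEFT-injected Flux transformer from this trace is held in a variable named `transformer` (a hypothetical name, not one from the log):

    # Count the LoRA-wrapped modules that each contribute a guard bundle
    # like the ones above (sketch; `transformer` is assumed to be the
    # peft-injected model that this trace was captured from).
    from peft.tuners.lora import LoraLayer

    lora_modules = [name for name, mod in transformer.named_modules()
                    if isinstance(mod, LoraLayer)]
    print(len(lora_modules), "LoRA layers, each guarded individually")

If guard evaluation or repeated recompiles become a bottleneck, one common mitigation is to merge the adapter into the base weights before compiling (for example diffusers' fuse_lora() or PEFT's merge_and_unload(), depending on how the model was loaded), which removes the lora_A/lora_B/scaling attribute accesses and, with them, the guards that mirror them.
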
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30], accessed_by=GetItemGuardAccessor(30)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30], 140581770782096) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[30].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn, accessed_by=DictGetItemGuardAccessor(attn)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn, 140581770783296) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[30].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_k, 140533242126656) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[30].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_k.lora_A, 140537202936368) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_k.lora_A['default_0'], 140537202945344) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_k.lora_A['default_0'].weight, 140531238504064) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_k.lora_B, 140537202936320) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_k.lora_B['default_0'], 140537202943088) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_k.base_layer, 140581770783440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_k.lora_dropout, 140533242126992) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_k.lora_dropout['default_0'], 140533242131312) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[30].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[30].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[30].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[30].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[30].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[30].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[30].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[30].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[30].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
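
Despite its name, TENSOR_ALIASING here asserts plain object identity between two sources: each LoRA submodule's _active_adapter must be the very object already recorded for transformer_blocks[0].norm1.linear, reflecting that the PEFT layers share one active-adapter record; the dump appears to print the relation once per access path, hence the consecutive duplicate lines. Conceptually the guard reduces to an `is` check, sketched below with hypothetical variable names standing in for the two layers named in the guard:

    # What a TENSOR_ALIASING guard on _active_adapter boils down to
    # (conceptual sketch; `root_linear` and `to_k` stand for the two
    # PEFT layers named in the guard, not real variables in the log).
    def active_adapter_alias_ok(root_linear, to_k):
        # One shared object, not merely equal contents:
        return root_linear._active_adapter is to_k._active_adapter

Being identity-based, this guard would be expected to fail if any layer rebinds _active_adapter to an equal but distinct object, for example after switching adapters.
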
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_q, 140533242119552) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[30].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_q.lora_A, 140533242119024) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_q.lora_A['default_0'], 140533242123680) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_q.lora_A['default_0'].weight, 140531238507744) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_q.lora_B, 140533242120560) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_q.lora_B['default_0'], 140533242127568) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_q.base_layer, 140581770783536) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_q.lora_dropout, 140533242119408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | |
| +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_q.lora_dropout['default_0'], 140533242121904) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[30].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[30].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[30].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[30].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[30].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 
in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[30].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[30].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].single_transformer_blocks[30].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[30].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_v, 140537202949424) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[30].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_v.lora_A, 140537202948944) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_v.lora_A['default_0'], 140537202946544) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_v.lora_A['default_0'].weight, 140531238512304) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_v.lora_B, 140537202949904) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.lora_B.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_v.lora_B['default_0'], 140537202936944) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_v.base_layer, 140581770783584) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | 
+- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_v.lora_dropout, 140537202943904) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_v.lora_dropout['default_0'], 140537202946400) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[30].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[30].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.scaling['default_0'], 
accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[30].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[30].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[30].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[30].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[30].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[30].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[30].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[30].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.norm_k, 140581770783488) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[30].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_k.eps, 
accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[30].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.norm_k.weight, 140581772499904) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.norm_q, 140581770783344) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[30].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_q.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[30].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.norm_q.weight, 140581766117024) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[30].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[30].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.processor, 140581770783248) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm, 140581770782912) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[30].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.norm, 140581770783056) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.silu, 140581770782960) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.linear, 140533237341424) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[30].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.linear.lora_A, 140533242123392) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in 
self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.linear.lora_A['default_0'], 140533242130160) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.linear.lora_A['default_0'].weight, 140531238513104) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.linear.lora_B, 140533242130448) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.linear.lora_B['default_0'], 140533242129008) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.linear.base_layer, 140581770783008) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.linear.lora_dropout, 140533242125168) # dropout = 
self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.linear.lora_dropout['default_0'], 140533242130400) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[30].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[30].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[30].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[30].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[30].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[30].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[30].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].norm.linear._disable_adapters, 140591004393440) # return 
self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[30].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[30].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].act_mlp, 140581770783152) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].act_mlp.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_mlp, 140533242131600) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[30].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_mlp.lora_A, 140533242131984) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[30].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_mlp.lora_A['default_0'], 140533242131744) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_mlp.lora_A['default_0'].weight, 140531238516464) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_mlp.lora_B, 140533242126944) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | 
| +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_mlp.lora_B['default_0'], 140533242131072) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_mlp.base_layer, 140581770783104) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_mlp.lora_dropout, 140533242134144) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_mlp.lora_dropout['default_0'], 140533242132512) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[30].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[30].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[30].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in 
forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[30].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[30].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[30].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[30].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[30].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[30].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_out, 140533242124976) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[30].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_out.lora_A, 140533242120752) # if active_adapter not 
in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_out.lora_A['default_0'], 140533242119120) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_out.lora_A['default_0'].weight, 140531238517344) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_out.lora_B, 140533242126272) # 
lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_out.lora_B['default_0'], 140533242118400) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_out.base_layer, 140581770783200) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_out.base_layer.training, 140591004393440) # result = 
self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_out.lora_dropout, 140533242119840) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_out.lora_dropout['default_0'], 140533242124016) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[30].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: 
len(L['self'].single_transformer_blocks[30].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[30].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[30].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[30].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[30].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[30].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[30].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[30].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[30].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[30].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[30]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31], accessed_by=GetItemGuardAccessor(31) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31], 140581770782864) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[31].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn, 140581770784064) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[31].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_k, 140537202760272) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ 
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[31].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_k.lora_A, 140537202758784) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_k.lora_A['default_0'], 140537202761664) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[31].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_k.lora_A['default_0'].weight, 140526678959552) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_k.lora_B, 140537202760368) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_k.lora_B['default_0'], 140537202763200) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_k.base_layer, 140581770784208) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_k.lora_dropout, 140537202757104) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_k.lora_dropout['default_0'], 140537202758688) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[31].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[31].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[31].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[31].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
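The trailing comments name the user code each guard protects, here peft/tuners/lora/layer.py:557-568. For orientation, that guarded branch of peft's LoRA Linear.forward looks roughly like this (paraphrased from the line references quoted in the log; the exact peft source may differ by version):

    # Sketch of the code path these guards cover; every attribute and dict
    # access below materializes one GuardManager node in the tree.
    result = self.base_layer(x, *args, **kwargs)          # layer.py:557
    for active_adapter in self.active_adapters:
        if active_adapter not in self.lora_A.keys():      # layer.py:560
            continue
        lora_A = self.lora_A[active_adapter]              # layer.py:562
        lora_B = self.lora_B[active_adapter]              # layer.py:563
        dropout = self.lora_dropout[active_adapter]       # layer.py:564
        scaling = self.scaling[active_adapter]            # layer.py:565
        x = x.to(lora_A.weight.dtype)                     # layer.py:566
        if not self.use_dora[active_adapter]:             # layer.py:568
            result = result + lora_B(lora_A(dropout(x))) * scaling

With a single adapter named default_0, this loop is why every wrapped projection (to_k here, to_q and to_v below) contributes an essentially identical guard subtree.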
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[31].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[31].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[31].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
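Besides identity pins, the scaling/use_dora/merged_adapters entries use structural guards on plain Python values. Approximate Python equivalents for intuition (a sketch; the printed helpers ___check_type_id and friends are Dynamo internals, and m is a hypothetical handle to the wrapped layer):

    m = transformer.single_transformer_blocks[31].attn.to_k
    assert type(m.scaling) is dict              # TYPE_MATCH: exact type, no subclasses
    assert len(m.scaling) == 1                  # DICT_LENGTH
    assert m.scaling['default_0'] == 1.0        # EQUALS_MATCH: value equality
    assert not m.merged_adapters                # LENGTH_CHECK: container is empty
    assert 'forward' not in m.__dict__          # DICT_CONTAINS: forward not monkey-patched

The EQUALS_MATCH on scaling == 1.0 is worth noting: an adapter with a different effective scale (for example a different lora_alpha to rank ratio) changes this float and invalidates the whole cache entry.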
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[31].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[31].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_q, 140537202766944) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[31].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_q.lora_A, 140537202758064) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
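The pair of TENSOR_ALIASING entries a few lines up (the logger emits the guard twice, verbatim) checks identity between two guard sources rather than on a single object: every LoRA-wrapped layer must keep sharing the very same _active_adapter object that transformer_blocks[0].norm1.linear exposed at compile time. Roughly, with a hypothetical model handle m:

    # Approximate semantics of the TENSOR_ALIASING guard: a plain `is` check.
    assert (m.transformer_blocks[0].norm1.linear._active_adapter
            is m.single_transformer_blocks[31].attn.to_k._active_adapter)

Despite the TENSOR_ALIASING label, the aliased value here appears to be peft's active-adapter state (tuners_utils.py:516), not a tensor; the guard only cares that both paths resolve to one object.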
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_q.lora_A['default_0'], 140537202756048) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_q.lora_A['default_0'].weight, 140526678962752) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_q.lora_B, 140537202763824) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_q.lora_B['default_0'], 140537202757776) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_q.base_layer, 140581770784304) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
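The accessed_by field records how the C++ guard tree reaches each value without running Python attribute protocol code, and the path shapes follow nn.Module's storage layout. A small sketch of the lookups the accessor names correspond to (an assumed mapping, inferred from the names and the source paths in this log):

    import torch.nn as nn

    lin = nn.Linear(4, 4)
    d = lin.__dict__                     # GetGenericDictGuardAccessor
    training = d['training']             # DictGetItemGuardAccessor(training)
    weight = d['_parameters']['weight']  # why guard paths index _parameters
    mods = d['_modules']                 # submodules live here, not as plain attrs
    # GetItemGuardAccessor(default_0) ~ obj['default_0'], e.g. ModuleDict indexing
    # GetAttrGuardAccessor(forward)   ~ getattr(obj, 'forward')

nn.Module keeps parameters and submodules in the _parameters/_modules side tables and resolves them through __getattr__, which is why the guard sources index those dicts instead of guarding ordinary attributes.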
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_q.lora_dropout, 140537202762624) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_q.lora_dropout['default_0'], 140537202764160) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[31].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[31].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[31].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[31].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[31].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[31].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[31].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[31].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[31].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_v, 140537202768720) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[31].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__
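By this point to_q has received the same subtree as to_k, on the order of thirty guards per wrapped Linear, repeated across every attention projection in the model's transformer_blocks and single_transformer_blocks, and the [0/3] tag marks this as the fourth guard set recorded for frame 0. When guard trees this size become a compile-time or recompilation problem, the usual first knobs are the public logging and cache settings (torch 2.x; the same switches the TORCH_LOGS environment variable exposes):

    import torch

    # Print guard trees and the reason for each recompile:
    torch._logging.set_logs(guards=True, recompiles=True)
    # Each [0/N] cache entry occupies one slot; the default limit is 8:
    torch._dynamo.config.cache_size_limit = 8

Once the limit is hit, Dynamo stops recompiling that frame and falls back to eager, so a guard set that keeps invalidating (for example via a changed LoRA scaling value) is worth fixing rather than papering over by raising the limit.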
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_v.lora_A, 140537202763488) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_v.lora_A['default_0'], 140537202753888) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_v.lora_A['default_0'].weight, 140526678950432) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_v.lora_B, 140537202767184) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_v.lora_B['default_0'], 140537202766992) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_v.base_layer, 140581770784352) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_v.lora_dropout, 140537202768288) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_v.lora_dropout['default_0'], 140537202754848) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[31].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[31].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[31].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[31].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[31].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[31].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[31].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[31].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[31].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.norm_k, 140581770784256) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[31].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[31].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.norm_k.weight, 140581783347232) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
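norm_k (and norm_q just below) is the RMS normalization over key/query projections; the EQUALS_MATCH pins eps == 1e-06 because that float gets baked into the compiled rsqrt. Per the quoted source lines (diffusers normalization.py:428 and :430), the guarded step is roughly the following sketch (the float32 variance line is an assumption from standard RMSNorm practice, not shown in this log):

    variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)
    hidden_states = hidden_states * torch.rsqrt(variance + self.eps)   # :428
    if self.weight is not None:                                        # :430
        hidden_states = hidden_states.to(self.weight.dtype) * self.weight

Note the None check at :430 is guarded via ID_MATCH on the weight object itself, so whichever branch was traced is frozen into this graph.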
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.norm_q, 140581770784112) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[31].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[31].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.norm_q.weight, 140581766104784) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[31].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[31].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.processor, 140581770784016) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.forward, accessed_by=FuncDefaultsGuardAccessor
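The FuncDefaultsGuardAccessor entry above reaches into the function object itself: the child guard just below pins attn.forward.__defaults__[0], i.e. the default value of encoder_hidden_states, because attention_processor.py:1713 branches on encoder_hidden_states is None. In plain Python terms (the guarded id value presumably being id(None) in this process; that is an inference, not shown in the log):

    def forward(hidden_states, encoder_hidden_states=None, attention_mask=None):
        ...

    # __defaults__ is the tuple of default values for the trailing
    # keyword-or-positional parameters; index 0 is encoder_hidden_states here.
    assert forward.__defaults__[0] is None

Mutating a function's defaults at runtime is legal Python, which is why Dynamo cannot assume they are stable and guards them explicitly.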
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm, 140581770783680) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[31].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.norm, 140581770783824) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.norm.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.silu, 140581770783728) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.linear, 140537202945968) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[31].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
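The two guard kinds that dominate this subtree are ID_MATCH and DICT_CONTAINS. As a reading aid, a minimal Python sketch of what they evaluate follows; check_obj_id and dict_contains are illustrative stand-ins for the ___check_obj_id / ___dict_contains helpers named in the log, not the actual torch._dynamo internals. Note that .training flags are guarded by object id: True and False are CPython singletons, so an id comparison doubles as a value check (in this dump 140591004393440 recurs where False is expected, e.g. on use_dora['default_0'] and _disable_adapters below, and 140591004393408 where True is).

def check_obj_id(obj, expected_id: int) -> bool:
    # ID_MATCH: passes only while the guarded attribute is still the exact
    # same Python object (same CPython id) that was seen at trace time.
    return id(obj) == expected_id

def dict_contains(key, d) -> bool:
    # DICT_CONTAINS (used negated above): asserts that no instance-level
    # 'forward' has been installed in a module's __dict__, which would
    # shadow the class-level forward that was traced.
    return key in d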
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.linear.lora_A, 140537202934016) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.linear.lora_A['default_0'], 140537202938048) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.linear.lora_A['default_0'].weight, 140526381250768) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.linear.lora_B, 140537202934592) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.linear.lora_B['default_0'], 140537202941984) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.linear.base_layer, 140581770783776) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.linear.lora_dropout, 140537202947168) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.linear.lora_dropout['default_0'], 140537202935504) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[31].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[31].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[31].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[31].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[31].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[31].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[31].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[31].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
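All of the norm.linear guards above point at the same few lines of peft/tuners/lora/layer.py (557-568). Stitching the quoted source comments together gives the path being specialized; the following is a condensed paraphrase under exactly the conditions the guards pin down (one adapter named 'default_0', nothing merged, adapters enabled, use_dora False, scaling 1.0), not the verbatim peft implementation:

def lora_linear_forward(self, x, *args, **kwargs):
    # Condensed paraphrase of the guarded peft Linear.forward branch,
    # following the source lines cited in the guard comments.
    result = self.base_layer(x, *args, **kwargs)       # layer.py:557
    for active_adapter in self.active_adapters:        # here: ['default_0']
        if active_adapter not in self.lora_A.keys():   # layer.py:560
            continue
        lora_A = self.lora_A[active_adapter]           # layer.py:562
        lora_B = self.lora_B[active_adapter]           # layer.py:563
        dropout = self.lora_dropout[active_adapter]    # layer.py:564
        scaling = self.scaling[active_adapter]         # layer.py:565 (== 1.0 here)
        x = x.to(lora_A.weight.dtype)                  # layer.py:566
        if not self.use_dora[active_adapter]:          # layer.py:568 (taken: False)
            result = result + lora_B(lora_A(dropout(x))) * scaling
    return result

Every object touched on that path (the two adapter weights, the dropout module, the base layer, and the dicts they live in) gets its own identity or value guard, which is why the same dozen-line forward produces the large subtree above, repeated per wrapped projection.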
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].act_mlp, 140581770783920) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_mlp, 140537202934496) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[31].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_mlp.lora_A, 140537202942272) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_mlp.lora_A['default_0'], 140537202941312) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_mlp.lora_A['default_0'].weight, 140531261621728) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_mlp.lora_B, 140537202934256) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_mlp.lora_B['default_0'], 140537202937712) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_mlp.base_layer, 140581770783872) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_mlp.lora_dropout, 140537202942896) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_mlp.lora_dropout['default_0'], 140537202935216) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[31].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[31].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[31].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[31].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[31].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
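Plain-value adapter state is guarded by value rather than identity: the scaling and use_dora entries above compose TYPE_MATCH, DICT_LENGTH, and EQUALS_MATCH/ID_MATCH. Written out as ordinary Python for the proj_mlp module, the trio reduces to roughly the following sketch (the shared type id 140591004466944 is consistent with dict, but all concrete ids are process-specific):

def proj_mlp_value_guards_hold(mod) -> bool:
    # TYPE_MATCH / DICT_LENGTH / EQUALS_MATCH on mod.scaling, plus the
    # matching trio on mod.use_dora, as ordinary Python expressions.
    return (
        type(mod.scaling) is dict                # TYPE_MATCH: exact type check
        and len(mod.scaling) == 1                # DICT_LENGTH
        and mod.scaling["default_0"] == 1.0      # EQUALS_MATCH
        and type(mod.use_dora) is dict           # TYPE_MATCH
        and len(mod.use_dora) == 1               # DICT_LENGTH
        and mod.use_dora["default_0"] is False   # ID_MATCH on the False singleton
    )

Loading a second adapter, changing the LoRA scale, or enabling DoRA would falsify one of these checks and force a recompile, which is why they are emitted per wrapped module.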
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[31].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[31].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[31].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_out, 140537202941504) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[31].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_out.lora_A, 140537202766176) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_out.lora_A['default_0'], 140537202764736) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_out.lora_A['default_0'].weight, 140526678957152) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_out.lora_B, 140537202755520) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_out.lora_B['default_0'], 140537202765120) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_out.base_layer, 140581770783968) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_out.lora_dropout, 140537202945440) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_out.lora_dropout['default_0'], 140537202939968) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[31].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[31].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[31].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[31].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[31].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[31].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[31].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[31].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[31].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
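Each LoRA layer's guard list closes with a TENSOR_ALIASING entry. Despite the tensor-flavored name, the printed condition is a plain `is` identity test: every wrapped module's _active_adapter must still be the one object shared with transformer_blocks[0].norm1.linear, the first LoRA layer the tracer encountered. A sketch of the condition, assuming `model` is the transformer instance bound to L['self']:

def active_adapter_aliasing_holds(model) -> bool:
    # TENSOR_ALIASING here is an object-identity check: the same
    # _active_adapter object must be shared by every LoRA-wrapped module.
    canonical = model.transformer_blocks[0].norm1.linear._active_adapter
    blk = model.single_transformer_blocks[31]
    return (
        blk.norm.linear._active_adapter is canonical
        and blk.proj_mlp._active_adapter is canonical
        and blk.proj_out._active_adapter is canonical
    )

Rebinding that attribute on any one module (for example when switching adapters) breaks the identity and invalidates the compiled graph; per the [0/3] tag on the raw records, this tree already belongs to the fourth compilation of the frame that produced the [0/0] tree earlier in the log.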
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[31]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32], accessed_by=GetItemGuardAccessor(32) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32], 140581770783632) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[32].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn, 140581770784832) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[32].attn.__dict__) # forward_call = 
(self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_k, 140537203441776) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[32].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_k.lora_A, 140537203445712) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[32].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_k.lora_A['default_0'], 140537203453632) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_k.lora_A['default_0'].weight, 140526678820480) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_k.lora_B, 140537203447632) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_k.lora_B['default_0'], 140537203456896) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_k.base_layer, 140581770784976) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_k.base_layer.training, 140591004393440) # result = 
self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_k.lora_dropout, 140537203443552) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_k.lora_dropout['default_0'], 140537203441872) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[32].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[32].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[32].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[32].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[32].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[32].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[32].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged 
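The to_k guards above walk, attribute by attribute, through the PEFT LoRA forward path quoted in their source comments (peft/tuners/lora/layer.py:557-568): the base projection, the lora_A/lora_B ModuleDicts, the dropout and scaling lookups, and the use_dora branch. Below is a minimal, self-contained sketch of that path under exactly the state these guards pin down: a single adapter named 'default_0' with scaling 1.0, no DoRA, and no merged adapters. The class name is hypothetical, and the final low-rank update line is paraphrased from PEFT's documented LoRA behavior rather than quoted in this log, so treat it as an assumption.

    import torch
    import torch.nn as nn

    class LoraLinearSketch(nn.Module):
        # Attribute names mirror the guarded sources: base_layer, lora_A, lora_B,
        # and lora_dropout (ModuleDicts keyed by adapter name), plus the plain
        # dicts scaling and use_dora that produce the TYPE_MATCH / DICT_LENGTH /
        # EQUALS_MATCH guards seen in the tree.
        def __init__(self, base: nn.Linear, r: int = 16):
            super().__init__()
            self.base_layer = base
            self.lora_A = nn.ModuleDict({"default_0": nn.Linear(base.in_features, r, bias=False)})
            self.lora_B = nn.ModuleDict({"default_0": nn.Linear(r, base.out_features, bias=False)})
            self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})  # dropout p=0
            self.scaling = {"default_0": 1.0}     # guarded by EQUALS_MATCH ... == 1.0
            self.use_dora = {"default_0": False}  # guarded by ID_MATCH on the False singleton
            self.active_adapters = ["default_0"]

        def forward(self, x: torch.Tensor) -> torch.Tensor:
            result = self.base_layer(x)                       # layer.py:557
            for active_adapter in self.active_adapters:
                if active_adapter not in self.lora_A.keys():  # layer.py:560
                    continue
                lora_A = self.lora_A[active_adapter]          # layer.py:562
                lora_B = self.lora_B[active_adapter]          # layer.py:563
                dropout = self.lora_dropout[active_adapter]   # layer.py:564
                scaling = self.scaling[active_adapter]        # layer.py:565
                x = x.to(lora_A.weight.dtype)                 # layer.py:566
                if not self.use_dora[active_adapter]:         # layer.py:568
                    # Paraphrased low-rank update, not quoted in this log.
                    result = result + lora_B(lora_A(dropout(x))) * scaling
            return result

Every dict lookup in this forward is something Dynamo specializes on, which is why each LoRA-wrapped projection contributes its own block of ID_MATCH, TYPE_MATCH, DICT_LENGTH, and EQUALS_MATCH guards to the tree.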
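For reference, a guard dump in this format can be regenerated with PyTorch's logging controls. The toy function below is a hypothetical stand-in for the compiled Flux forward, not taken from this log:

    import torch

    torch._logging.set_logs(guards=True)  # same artifact as TORCH_LOGS="guards"

    @torch.compile
    def f(x: torch.Tensor) -> torch.Tensor:
        return x * 2

    f(torch.randn(4))  # emits a TREE_GUARD_MANAGER dump like the one above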
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[32].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[32].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_q, 140537202074640) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[32].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_q.lora_A, 140537202075408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_q.lora_A['default_0'], 140537203448448) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_q.lora_A['default_0'].weight, 140526658925040) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_q.lora_B, 140537202072144) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_q.lora_B['default_0'], 140537203442016) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.base_layer, 
accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_q.base_layer, 140581770785072) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_q.lora_dropout, 140537202080928) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_q.lora_dropout['default_0'], 140537202071328) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | 
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[32].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[32].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[32].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[32].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[32].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q._forward_hooks, 
accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[32].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[32].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[32].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[32].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_v, 140537203450416) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[32].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_v.lora_A, 140537203457760) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_v.lora_A['default_0'], 140537203444944) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_v.lora_A['default_0'].weight, 140526678819200) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_v.lora_B, 140537203449456) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_v.lora_B['default_0'], 140537203452096) # 
lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_v.base_layer, 140581770785120) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_v.lora_dropout, 140537203456992) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_v.lora_dropout['default_0'], 140537203448784) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[32].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[32].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[32].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[32].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # 
peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[32].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[32].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[32].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.to_v._active_adapter, 
accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[32].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.norm_k, 140581770785024) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[32].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[32].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496
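
The guards in the preceding cluster (ID_MATCH on use_dora['default_0'], TYPE_MATCH/LENGTH_CHECK on merged_adapters, ID_MATCH on _disable_adapters, and TENSOR_ALIASING tying every layer's _active_adapter to the one list shared with transformer_blocks[0].norm1.linear) let Dynamo specialize peft's LoRA Linear.forward to the single-adapter, non-DoRA, unmerged fast path. Below is a minimal sketch of that path, assuming a simplified layer object with the attributes named in the log; it illustrates what the guards freeze and is not peft's actual implementation.

def lora_linear_forward(self, x):
    # Both conditions are guarded False/empty above, so the compiled graph drops them.
    if self.disable_adapters or self.merged_adapters:
        return self.base_layer(x)
    result = self.base_layer(x)                      # layer.py:557
    for active_adapter in self._active_adapter:      # shared list, hence TENSOR_ALIASING
        if active_adapter not in self.lora_A:        # layer.py:560
            continue
        lora_A = self.lora_A[active_adapter]         # layer.py:562
        lora_B = self.lora_B[active_adapter]         # layer.py:563
        dropout = self.lora_dropout[active_adapter]  # layer.py:564
        scaling = self.scaling[active_adapter]       # layer.py:565
        x = x.to(lora_A.weight.dtype)                # layer.py:566
        if not self.use_dora[active_adapter]:        # layer.py:568, guarded False
            result = result + lora_B(lora_A(dropout(x))) * scaling
    return result
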
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.norm_k.weight, 140581773255808) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.norm_q, 140581770784880) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[32].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[32].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # 
diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.norm_q.weight, 140581783349712) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[32].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[32].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.processor, 140581770784784) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn._forward_hooks, 
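
The EQUALS_MATCH guards in this stretch pin plain Python scalars: attn.norm_q.eps and attn.norm_k.eps must equal 1e-06 and attn.heads must equal 24, so the epsilon and the head split are baked into the compiled code as constants (head_dim = inner_dim // attn.heads at attention_processor.py:1721 folds away entirely; a checkpoint with a different head count would need a fresh compile). A rough sketch of the qk RMSNorm forward those eps guards point at (diffusers normalization.py:428-430, simplified here for illustration):

import torch

def rms_norm(hidden_states: torch.Tensor, weight, eps: float = 1e-6) -> torch.Tensor:
    # Variance over the channel dim in float32, then rescale (normalization.py:428).
    variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)
    hidden_states = hidden_states * torch.rsqrt(variance + eps)
    if weight is not None:  # normalization.py:430
        hidden_states = hidden_states.to(weight.dtype) * weight
    return hidden_states
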
accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm, 140581770784448) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[32].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # 
diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.norm, 140581770784592) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.silu, 140581770784496) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.linear, 140537202080064) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in 
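
The norm subtree quotes diffusers normalization.py:169-171, the adaptive layer norm that produces the per-token scale/shift plus the gate consumed later at transformer_flux.py:98. A compact sketch of that modulation, assuming the 3-way chunk layout of the single-block variant (names are taken from the quoted source lines):

def ada_layer_norm_zero_single(x, emb, silu, linear, norm):
    emb = linear(silu(emb))                                       # normalization.py:169
    shift_msa, scale_msa, gate_msa = emb.chunk(3, dim=1)
    x = norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]   # normalization.py:171
    return x, gate_msa
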
forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[32].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.linear.lora_A, 140537202072336) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.linear.lora_A['default_0'], 140537202075792) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[32].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.linear.lora_A['default_0'].weight, 140526658925840) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.linear.lora_B, 140537202079488) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.linear.lora_B['default_0'], 140537202073920) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.linear.base_layer, 140581770784544) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.linear.lora_dropout, 140537202067968) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[32].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.linear.lora_dropout['default_0'], 140537202072720) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[32].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[32].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[32].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[32].norm.linear.use_dora, 140591004466944) # if not 
self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[32].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[32].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[32].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
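
Worth noting what an EQUALS_MATCH on a float such as scaling['default_0'] == 1.0 implies: the LoRA scale is burned into the graph, and mutating it later (for example when re-scaling an adapter) fails the guard and forces a recompile. A small self-contained demo of the mechanism, using a hypothetical toy module rather than the FLUX model:

import torch

class Scaled(torch.nn.Module):
    def __init__(self):
        super().__init__()
        self.lin = torch.nn.Linear(8, 8)
        self.scaling = {"default_0": 1.0}  # value gets an EQUALS_MATCH guard

    def forward(self, x):
        return self.lin(x) * self.scaling["default_0"]

mod = Scaled()
opt = torch.compile(mod)
x = torch.randn(2, 8)
opt(x)                          # first call compiles; guard: scaling['default_0'] == 1.0
mod.scaling["default_0"] = 0.5  # mutate the guarded value ...
opt(x)                          # ... guard fails, Dynamo recompiles this frame
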
source=L['self'].single_transformer_blocks[32].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[32].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].act_mlp, 140581770784688) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH:
___check_obj_id(L['self'].single_transformer_blocks[32].proj_mlp, 140537202077136) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[32].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_mlp.lora_A, 140537202070512) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_mlp.lora_A['default_0'], 140537202067728) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 
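
The recurring DICT_CONTAINS: not ___dict_contains('forward', ...) checks, together with the empty _forward_hooks/_backward_hooks managers, come from nn/modules/module.py:1556: the trace is only valid while no instance-level forward override and no hooks change what Module.__call__ dispatches to. Registering a hook after compilation therefore invalidates these guards, roughly as in this sketch:

import torch

lin = torch.nn.Linear(4, 4)
opt = torch.compile(lin)
x = torch.randn(1, 4)
opt(x)  # compiles; guards record that lin has no hooks and no shadowing 'forward'
lin.register_forward_hook(lambda mod, inp, out: out * 2)
opt(x)  # the hook guard fails, so this call recompiles with the hook taken into account
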
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_mlp.lora_A['default_0'].weight, 140526658913040) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_mlp.lora_B, 140537202076080) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_mlp.lora_B['default_0'], 140537202068784) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_mlp.base_layer, 140581770784640) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_mlp.lora_dropout, 140537202075504) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 
in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_mlp.lora_dropout['default_0'], 140537202074160) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[32].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[32].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[32].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[32].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[32].proj_mlp.use_dora) == 1 # if not 
self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[32].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[32].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[32].proj_mlp._active_adapter # return 
self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_out, 140537202070608) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[32].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_out.lora_A, 140537202067344) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH:
___check_obj_id(L['self'].single_transformer_blocks[32].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_out.lora_A['default_0'], 140537202073440) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_out.lora_A['default_0'].weight, 140526658912640) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_out.lora_B, 140537202068976) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | 
| +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_out.lora_B['default_0'], 140537202072288) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_out.base_layer, 140581770784736) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_out.lora_dropout, 140537202069168) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_out.lora_dropout['default_0'], 140537202071280) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[32].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[32].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[32].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in 
forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[32].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[32].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[32].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[32].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[32].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[32].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[32].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[32]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33], accessed_by=GetItemGuardAccessor(33) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33], 140581770784400) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[33].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[33].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn, 140581770785600) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[33].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_k, 140533235847408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[33].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- 
GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_k.lora_A, 140531279706624) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_k.lora_A['default_0'], 140531276596896) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_k.lora_A['default_0'].weight, 140526264846192) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_k.lora_B, 140531279932592) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_k.lora_B['default_0'], 140531276596656) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = 
self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_k.base_layer, 140581770785744) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_k.lora_dropout, 140531279436608) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_k.lora_dropout['default_0'], 140531279861968) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | 
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[33].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[33].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[33].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[33].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[33].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # 
peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[33].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[33].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[33].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[33].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- 
GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_q, 140531277644128) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[33].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_q.lora_A, 140531277647728) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | 
| | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_q.lora_A['default_0'], 140531277498304) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_q.lora_A['default_0'].weight, 140526264857232) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_q.lora_B, 140531277507760) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_q.lora_B['default_0'], 140531277504208) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_q.base_layer, 140581770785840) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_q.lora_dropout, 140531277651664) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[33].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_q.lora_dropout['default_0'], 140531277658960) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[33].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[33].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[33].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[33].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[33].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[33].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[33].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q._backward_pre_hooks, 
accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[33].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[33].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_v, 140531277497728) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[33].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_v.lora_A, 140531276474176) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_v.lora_A['default_0'], 140531277005680) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_v.lora_A['default_0'].weight, 140526264852032) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_v.lora_B, 140531278435984) # lora_B = 
self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_v.lora_B['default_0'], 140531277011584) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_v.base_layer, 140581770785888) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_v.lora_dropout, 140531276472592) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_v.lora_dropout['default_0'], 140531276478976) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[33].attn.to_v.scaling, 140591004466944) # scaling = 
self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[33].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[33].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[33].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[33].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[33].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
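
The EQUALS_MATCH above pins scaling['default_0'] to the Python float 1.0: the LoRA scale is read as a plain scalar in the forward, so it is burned into the compiled graph rather than fetched at run time, and giving the adapter a different weight invalidates the guard and forces a recompile. A standalone illustration of that specialization behavior (hypothetical function, not from this trace):

    import torch

    @torch.compile
    def scale(x, s: float):
        return x * s

    x = torch.randn(4)
    scale(x, 1.0)  # first call compiles; Dynamo guards on s == 1.0
    scale(x, 0.8)  # guard fails, so this call triggers a recompilation

V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not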
L['self'].single_transformer_blocks[33].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[33].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.norm_k, 140581770785792) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[33].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
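
The TENSOR_ALIASING entries recur under every LoRA-wrapped layer because PEFT points them all at one shared _active_adapter object; Dynamo therefore asserts object identity against a single canonical source (transformer_blocks[0].norm1.linear) instead of re-validating the list's contents per layer. A sketch of the invariant those guards encode, assuming `model` is the FluxTransformer2DModel being compiled:

    # Identity, not equality: one content check on the canonical list is
    # enough, because all of these attributes are the same object.
    ref = model.transformer_blocks[0].norm1.linear._active_adapter
    assert model.single_transformer_blocks[33].attn.to_q._active_adapter is ref
    assert model.single_transformer_blocks[33].attn.to_v._active_adapter is ref

V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_k.training,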
accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[33].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.norm_k.weight, 140581766107504) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.norm_q, 140581770785648) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_q.__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[33].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[33].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.norm_q.weight, 140581773258288) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
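
The norm_q and norm_k subtrees guard the query/key RMSNorm layers: EQUALS_MATCH pins eps to 1e-06 and ID_MATCH pins the weight parameter, both read at diffusers/src/diffusers/models/normalization.py:428-430. A minimal sketch of that forward, with the variance term assumed from the standard RMSNorm recipe:

    import torch

    # Simplified from the normalization.py frames cited in the guards.
    def rms_norm(hidden_states: torch.Tensor, weight, eps: float = 1e-6):
        variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + eps)  # :428
        if weight is not None:                                       # :430
            hidden_states = hidden_states.to(weight.dtype) * weight
        return hidden_states

V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: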
source=L['self'].single_transformer_blocks[33].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[33].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[33].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.processor, 140581770785552) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
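
attn.heads is read as a plain Python int inside the processor, so the head arithmetic happens at trace time and only the EQUALS_MATCH on the value (24) survives into the guard set; the FuncDefaultsGuardAccessor entry likewise pins the encoder_hidden_states=None default of attn.forward. A standalone illustration of the trace-time arithmetic, with inner_dim assumed to be 3072 (the Flux single-block hidden size, not printed in this trace):

    inner_dim = 3072               # assumption: hidden size of the block
    heads = 24                     # pinned by the EQUALS_MATCH guard above
    head_dim = inner_dim // heads  # attention_processor.py:1721
    assert head_dim == 128         # constant-folded into the graph's reshapes

V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm,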
accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm, 140581770785216) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[33].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.norm, 140581770785360) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[33].norm.silu, 140581770785264) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.linear, 140537203456944) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[33].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.linear.lora_A, 140537203442064) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] 
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.linear.lora_A['default_0'], 140537203443504) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.linear.lora_A['default_0'].weight, 140526264858432) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.linear.lora_B, 140537203443360) # lora_B = 
self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.linear.lora_B['default_0'], 140537203442112) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.linear.base_layer, 140581770785312) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[33].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.linear.lora_dropout, 140537203452144) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.linear.lora_dropout['default_0'], 140537203443744) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[33].norm.linear.scaling, 
140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[33].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[33].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[33].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[33].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[33].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- 
LENGTH_CHECK: not L['self'].single_transformer_blocks[33].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[33].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
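
The norm subtree that closes above is the block's AdaLayerNormZeroSingle: SiLU plus a Linear on the conditioning embedding (the Linear is LoRA-wrapped, hence the nested lora_A/lora_B guards), followed by the modulated LayerNorm at normalization.py:169-171. A sketch of that forward, with the three-way chunk assumed from the shift/scale/gate naming:

    # Simplified from the normalization.py frames cited in the guards;
    # self.linear is the LoRA-wrapped projection, self.norm the LayerNorm.
    def ada_layer_norm_zero_single(self, x, emb):
        emb = self.linear(self.silu(emb))                     # :169
        shift_msa, scale_msa, gate_msa = emb.chunk(3, dim=1)  # assumed split
        x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # :171
        return x, gate_msa

V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: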
___check_obj_id(L['self'].single_transformer_blocks[33].act_mlp, 140581770785456) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_mlp, 140533122957952) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[33].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_mlp.lora_A, 140533122959248) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_mlp.lora_A['default_0'], 140526534338656) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_mlp.lora_A['default_0'].weight, 140526264854192) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_mlp.lora_B, 140533122960064) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_mlp.lora_B['default_0'], 140526534339568) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_mlp.base_layer, 140581770785408) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_mlp.lora_dropout, 140533122970480) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_mlp.lora_dropout['default_0'], 140533122959584) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[33].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[33].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[33].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[33].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[33].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[33].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[33].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[33].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[33].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
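
Note on the guards above: for each LoRA-wrapped Linear, Dynamo pins the exact module objects (ID_MATCH on lora_A, lora_B, base_layer and their weights) and burns the Python float stored in peft's scaling dict into the compiled code (EQUALS_MATCH ... == 1.0). If any of these change between calls, the compiled entry is invalidated. A minimal sketch of that failure mode, using a toy module rather than the FLUX transformer (all names here are illustrative):

    import torch

    class ToyLora(torch.nn.Module):
        def __init__(self):
            super().__init__()
            self.base = torch.nn.Linear(8, 8)
            # plain dict of Python floats, like peft's LoraLayer.scaling
            self.scaling = {"default_0": 1.0}

        def forward(self, x):
            # reading a Python float inside forward() makes Dynamo specialize
            # on its value and install an EQUALS_MATCH guard, as in the log above
            return self.base(x) * self.scaling["default_0"]

    m = torch.compile(ToyLora())
    x = torch.randn(2, 8)
    m(x)                          # first call compiles; guards scaling['default_0'] == 1.0
    m.scaling["default_0"] = 0.5  # roughly what changing the lora scale amounts to
    m(x)                          # guard fails, so this call triggers a recompile

For inference with fixed adapter weights, merging the LoRA into the base weights (diffusers pipelines expose fuse_lora(), peft has merge_adapter()) should keep these dict and submodule reads out of the traced forward entirely.
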
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_out, 140526538781360) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[33].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_out.lora_A, 140531277655696) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_out.lora_A['default_0'], 140531277648208) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_out.lora_A['default_0'].weight, 140526264847232) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_out.lora_B, 140531277659008) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_out.lora_B['default_0'], 140531277658816) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_out.base_layer, 140581770785504) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_out.lora_dropout, 140531277660064) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_out.lora_dropout['default_0'], 140531277647536) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[33].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[33].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[33].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[33].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[33].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[33].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[33].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[33].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[33].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[33].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
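
Note: the paired TENSOR_ALIASING entries above (each guard is printed twice in the dump) are object-identity checks. At trace time every LoRA layer's _active_adapter resolved to the same Python object as transformer_blocks[0].norm1.linear._active_adapter, and Dynamo guards that identity, not just equality. A sketch of what the check reduces to (the variable names are illustrative, not the guard internals):

    # stand-in for peft's shared active-adapter state
    shared_active = ["default_0"]
    layer_a_active = shared_active
    layer_b_active = shared_active
    assert layer_a_active is layer_b_active       # guard holds: same object
    layer_b_active = list(shared_active)          # an equal copy is not enough ...
    assert layer_a_active is not layer_b_active   # ... identity is what is guarded
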
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[33]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34], accessed_by=GetItemGuardAccessor(34)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34], 140581770785168) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[34].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn, 140581770786368) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[34].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_k, 140531277249648) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[34].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_k.lora_A, 140531277250368) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_k.lora_A['default_0'], 140531277243936) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_k.lora_A['default_0'].weight, 140526264850752) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_k.lora_B, 140531277249888) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_k.lora_B['default_0'], 140531277244464) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_k.base_layer, 140581770786512) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_k.lora_dropout, 140531277248304) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_k.lora_dropout['default_0'], 140531277249312) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[34].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[34].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[34].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[34].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[34].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[34].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[34].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[34].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[34].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
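
Note: the [__guards] tag on every record marks this dump as the "guards" logging artifact, printed once per compiled frame. With a guard subtree like the above repeated for every LoRA-wrapped projection in every block, it is usually more practical to log only guard failures when chasing recompiles. A sketch of the relevant switches (current PyTorch 2.x APIs; the same artifacts can be enabled with the TORCH_LOGS environment variable):

    import torch
    import torch._dynamo

    # print the full guard tree after each compile (what produced this dump)
    # and the specific guard that failed whenever a recompile happens;
    # equivalent to running with TORCH_LOGS="guards,recompiles"
    torch._logging.set_logs(guards=True, recompiles=True)

    # Dynamo keeps at most cache_size_limit compiled variants per function and
    # falls back to eager once the limit is hit; raise it if legitimately
    # different configurations (e.g. different LoRA scales) keep recompiling
    torch._dynamo.config.cache_size_limit = 16
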
source=L['self'].single_transformer_blocks[34].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_q.lora_A, 140531277248448) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_q.lora_A['default_0'], 140531277249408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_q.lora_A['default_0'].weight, 140526264858592) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_q.lora_B, 140531277249168) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_q.lora_B['default_0'], 140531277248928) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_q.base_layer, 140581770786608) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.base_layer.__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_q.lora_dropout, 140531277247584) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_q.lora_dropout['default_0'], 140531277248064) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
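
For reference, the to_q records above and immediately below pin the entire PEFT LoRA dispatch for this projection: module identities via ID_MATCH (base_layer, lora_A/lora_B['default_0'], lora_dropout), the adapter containers via TYPE_MATCH and DICT_LENGTH, and the concrete constants via EQUALS_MATCH (scaling['default_0'] == 1.0) and ID_MATCH on the use_dora booleans. A minimal sketch of the forward path these guards trace, condensed from the peft/tuners/lora/layer.py:557-568 lines quoted in the guard comments (this is not the verbatim peft source; the merged-adapter, disable_adapters, and DoRA branches are elided):

    def lora_linear_forward(self, x, *args, **kwargs):
        # Hypothetical condensation of peft's LoRA Linear.forward, keeping
        # only the lines the guards above reference.
        result = self.base_layer(x, *args, **kwargs)        # layer.py:557
        for active_adapter in self.active_adapters:
            if active_adapter not in self.lora_A.keys():    # layer.py:560
                continue
            lora_A = self.lora_A[active_adapter]            # layer.py:562
            lora_B = self.lora_B[active_adapter]            # layer.py:563
            dropout = self.lora_dropout[active_adapter]     # layer.py:564
            scaling = self.scaling[active_adapter]          # layer.py:565
            x = x.to(lora_A.weight.dtype)                   # layer.py:566
            if not self.use_dora[active_adapter]:           # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling
        return result

Every one of these reads is guarded, so a runtime change such as loading a second adapter (which changes the DICT_LENGTH of scaling/use_dora) or running with a different lora_scale (which fails the EQUALS_MATCH on scaling['default_0']) invalidates this compiled graph and triggers a recompile.
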
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[34].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[34].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[34].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[34].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[34].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[34].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[34].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[34].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[34].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_v, 140531277249696) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', 
L['self'].single_transformer_blocks[34].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_v.lora_A, 140531277365360) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_v.lora_A['default_0'], 140531277367808) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_v.lora_A['default_0'].weight, 140526264853712) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_v.lora_B, 140531277365888) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_v.lora_B['default_0'], 140531277367568) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.lora_B['default_0'].training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_v.base_layer, 140581770786656) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_v.lora_dropout, 140531277365600) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | 
+- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_v.lora_dropout['default_0'], 140531277366032) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[34].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[34].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[34].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[34].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[34].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.use_dora['default_0'], 
accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[34].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[34].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[34].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: 
L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[34].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.norm_k, 140581770786560) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[34].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[34].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.norm_k.weight, 140581783350592) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.norm_q, 140581770786416) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[34].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[34].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.norm_q.weight, 140581766108944) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[34].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[34].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.processor, 140581770786320) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
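
For reference, the norm_q/norm_k guards above specialize the query/key RMSNorm on eps (EQUALS_MATCH == 1e-06) and on the optional weight object, and attn.heads == 24 is pinned because head_dim = inner_dim // attn.heads is a Python int that feeds the attention reshape. A condensed sketch of the RMSNorm forward the guard comments point at (diffusers/src/diffusers/models/normalization.py:428-430); the real module's dtype up/downcasting is simplified here:

    import torch

    def rms_norm_forward(self, hidden_states):
        # Condensed sketch: variance over the last dim in float32,
        # rsqrt-normalize, then apply the optional learned scale.
        variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + self.eps)  # normalization.py:428
        if self.weight is not None:                                       # normalization.py:430
            hidden_states = hidden_states.to(self.weight.dtype) * self.weight
        return hidden_states

Because eps and heads are captured as plain Python constants rather than tensors, they are effectively baked into the compiled graph; only the weight tensor remains a runtime input.
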
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm, 140581770785984) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[34].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | |
| | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.norm, 140581770786128) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.silu, 140581770786032) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.linear, 140531276599920) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[34].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.linear.lora_A, 140531277114592) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.linear.lora_A['default_0'], 140531277116224) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = 
self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.linear.lora_A['default_0'].weight, 140526264852752) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.linear.lora_B, 140531277115024) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.linear.lora_B['default_0'], 140531277118768) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[34].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.linear.base_layer, 140581770786080) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.linear.lora_dropout, 140531277113968) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.linear.lora_dropout['default_0'], 140531277112480) # dropout = 
self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[34].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[34].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[34].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[34].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[34].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | 
| | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[34].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[34].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[34].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].single_transformer_blocks[34].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].act_mlp, 140581770786224) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_mlp, 140531277106096) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[34].proj_mlp.__dict__) # 
forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_mlp.lora_A, 140531277115408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_mlp.lora_A['default_0'], 140531277115840) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[34].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_mlp.lora_A['default_0'].weight, 140526264844992) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_mlp.lora_B, 140531277105376) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_mlp.lora_B['default_0'], 140531277108976) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_mlp.base_layer, 140581770786176) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_mlp.lora_dropout, 140531277113920) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_mlp.lora_dropout['default_0'], 140531277112864) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[34].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[34].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[34].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[34].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[34].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[34].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[34].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[34].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[34].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_out, 140531277236592) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[34].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_out.lora_A, 140531277237936) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_out.lora_A['default_0'], 140531277246528) # lora_A = self.lora_A[active_adapter] # 
peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_out.lora_A['default_0'].weight, 140526264854112) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_out.lora_B, 140531277237120) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_out.lora_B['default_0'], 140531277247392) # lora_B = 
self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_out.base_layer, 140581770786272) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_out.lora_dropout, 140531277242112) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_out.lora_dropout.training, 140591004393408) # 
dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_out.lora_dropout['default_0'], 140531277237168) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[34].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[34].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[34].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[34].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: 
len(L['self'].single_transformer_blocks[34].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[34].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[34].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[34].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].single_transformer_blocks[34].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[34].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[34]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35], accessed_by=GetItemGuardAccessor(35) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35], 140581770785936) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[35].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn, 140581770787136) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[35].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_k, 140531277377216) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[35].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_k.lora_A, 140531277377696) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_k.lora_A['default_0'], 140531277378512) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_k.lora_A['default_0'].weight, 140531238122608) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_k.lora_B, 140531277377888) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_k.lora_B['default_0'], 140531277378944) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_k.base_layer, 140581770787280) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_k.lora_dropout, 140531277376208) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_k.lora_dropout['default_0'], 140531277377264) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[35].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[35].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[35].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[35].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[35].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[35].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[35].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[35].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[35].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_q, 140531277375152) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[35].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[35].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_q.lora_A, 140531277375488) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_q.lora_A['default_0'], 140531277376400) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_q.lora_A['default_0'].weight, 140531238123088) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_q.lora_B, 140531277375440) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_q.lora_B['default_0'], 140531277376448) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.lora_B['default_0'].__dict__, 
accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_q.base_layer, 140581770787376) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_q.lora_dropout, 140531277375008) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | 
| +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_q.lora_dropout['default_0'], 140531277374960) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[35].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[35].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[35].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[35].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[35].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 
in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[35].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[35].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is 
L['self'].single_transformer_blocks[35].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[35].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_v, 140531277379472) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[35].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_v.lora_A, 140531277379712) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] 
[__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_v.lora_A['default_0'], 140531277380576) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_v.lora_A['default_0'].weight, 140531238118128) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_v.lora_B, 140531277379136) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.lora_B.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_v.lora_B['default_0'], 140531277380192) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_v.base_layer, 140581770787424) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | 
+- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_v.lora_dropout, 140531277379232) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_v.lora_dropout['default_0'], 140531277378848) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[35].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[35].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.scaling['default_0'], 
accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[35].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[35].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[35].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[35].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[35].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[35].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[35].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[35].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.norm_k, 140581770787328) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[35].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_k.eps, 
accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[35].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.norm_k.weight, 140581771030704) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.norm_q, 140581770787184) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[35].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_q.training, 
accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[35].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.norm_q.weight, 140581771031264) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[35].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[35].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.processor, 140581770787088) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm, 140581770786752) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[35].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.norm, 140581770786896) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.silu, 140581770786800) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.linear, 140531277368144) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[35].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.linear.lora_A, 140531277368432) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in 
self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.linear.lora_A['default_0'], 140531277369152) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.linear.lora_A['default_0'].weight, 140531238116688) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.linear.lora_B, 140531277369104) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.linear.lora_B['default_0'], 140531277370304) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.linear.base_layer, 140581770786848) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.linear.lora_dropout, 140531277367472) # dropout = 
self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.linear.lora_dropout['default_0'], 140531277367904) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[35].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[35].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[35].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[35].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[35].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[35].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[35].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].norm.linear._disable_adapters, 140591004393440) # return 
self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[35].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].act_mlp, 140581770786992) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].act_mlp.training,
accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_mlp, 140531277370784) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[35].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_mlp.lora_A, 140531277371024) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[35].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_mlp.lora_A['default_0'], 140531277372128) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_mlp.lora_A['default_0'].weight, 140531238109808) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_mlp.lora_B, 140531277370688) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | 
| +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_mlp.lora_B['default_0'], 140531277371840) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_mlp.base_layer, 140581770786944) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_mlp.lora_dropout, 140531277370400) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_mlp.lora_dropout['default_0'], 140531277370352) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[35].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[35].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[35].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in 
forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[35].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[35].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[35].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[35].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[35].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_out, 140531277372656) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[35].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_out.lora_A, 140531277372992) # if active_adapter not
in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_out.lora_A['default_0'], 140531277374336) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_out.lora_A['default_0'].weight, 140531238118288) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_out.lora_B, 140531277373472) # 
lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_out.lora_B['default_0'], 140531277374288) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_out.base_layer, 140581770787040) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_out.base_layer.training, 140591004393440) # result = 
self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_out.lora_dropout, 140531277372896) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_out.lora_dropout['default_0'], 140531277372848) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[35].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[35].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[35].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[35].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[35].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[35].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[35].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[35].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[35].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[35].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[35]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
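The subtree above closes out the per-adapter guards for single_transformer_blocks[35].proj_out; the same pattern repeats below for every LoRA-wrapped linear in block 36. For orientation, here is a minimal sketch of the PEFT forward path these guards trace, reconstructed from the peft/tuners/lora/layer.py:557-568 fragments quoted in the guard comments (the loop structure and surrounding control flow are assumptions, not verified against the installed PEFT version):

    # Sketch of peft.tuners.lora.layer.Linear.forward (lines 557-568) as
    # quoted by the guard comments; control flow around them is assumed.
    result = self.base_layer(x, *args, **kwargs)        # layer.py:557
    for active_adapter in self.active_adapters:
        if active_adapter not in self.lora_A.keys():    # layer.py:560
            continue
        lora_A = self.lora_A[active_adapter]            # layer.py:562
        lora_B = self.lora_B[active_adapter]            # layer.py:563
        dropout = self.lora_dropout[active_adapter]     # layer.py:564
        scaling = self.scaling[active_adapter]          # layer.py:565
        x = x.to(lora_A.weight.dtype)                   # layer.py:566
        if not self.use_dora[active_adapter]:           # layer.py:568
            result = result + lora_B(lora_A(dropout(x))) * scaling

Every attribute read on that path is pinned by a guard: the lora_A/lora_B/lora_dropout modules and their weights by ID_MATCH, the scaling dict by TYPE_MATCH, DICT_LENGTH and EQUALS_MATCH (== 1.0), and use_dora['default_0'] by ID_MATCH on a boolean constant (object identity of True/False).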
| | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36], accessed_by=GetItemGuardAccessor(36)
| | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36], 140581770786704) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[36].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36]._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn, accessed_by=DictGetItemGuardAccessor(attn)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn, 140581770787904) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[36].attn.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_k, 140531277488064) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[36].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_k.lora_A, 140531275898384) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_k.lora_A['default_0'], 140531275903328) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_k.lora_A['default_0'].weight, 140531238112688) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_k.lora_B, 140531275904768) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_k.lora_B['default_0'], 140531275904144) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_k.base_layer, 140581770788048) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_k.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_k.lora_dropout, 140531275755104) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_k.lora_dropout['default_0'], 140531275752512) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[36].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[36].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[36].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[36].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[36].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[36].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[36].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[36].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[36].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
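Note the per-layer TENSOR_ALIASING pair above: it is an object-identity check asserting that this layer's _active_adapter is literally the same object as the one on transformer_blocks[0].norm1.linear, i.e. one adapter list shared by all wrapped layers. The guard predicates themselves reduce to a few cheap checks; a rough Python rendering of the most common ones in this dump (illustrative only; the real implementations are compiled GuardManager nodes):

    def id_match(obj, expected_id):          # ID_MATCH / ___check_obj_id
        return id(obj) == expected_id

    def type_match(obj, expected_type_id):   # TYPE_MATCH / ___check_type_id
        return id(type(obj)) == expected_type_id

    def dict_length(d, n):                   # DICT_LENGTH
        return len(d) == n

    def equals_match(value, constant):       # EQUALS_MATCH
        return value == constant

ID_MATCH is why the dump is dominated by raw object ids: the guard fails, and the graph must recompile, as soon as any guarded module or flag is replaced by a different object, even an equal one.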
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_q, 140531276070144) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[36].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_q.lora_A, 140531276069952) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_q.lora_A['default_0'], 140531276068320) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_q.lora_A['default_0'].weight, 140531238114288) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_q.lora_B, 140531276069856) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_q.lora_B['default_0'], 140531276057424) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_q.base_layer, 140581770788144) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_q.lora_dropout, 140531276069424) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_q.lora_dropout['default_0'], 140531276070240) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[36].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[36].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[36].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[36].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[36].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[36].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[36].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[36].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[36].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
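The to_q subtree just closed mirrors to_k exactly, and to_v below repeats it once more; only the object ids differ. The practical upshot of guarding scaling['default_0'] == 1.0 and the training/use_dora flags is that mutating any of them after compilation invalidates this graph on the next call. A hypothetical illustration (transformer stands for the compiled FluxTransformer2DModel instance; the attribute path is taken from the guards above):

    blk = transformer.single_transformer_blocks[36].attn.to_q
    blk.scaling["default_0"] = 0.5  # EQUALS_MATCH (== 1.0) now fails -> recompile
    transformer.train()             # flips every guarded .training flag -> recompile

Keeping the LoRA scale fixed across calls, or merging the adapter weights before compiling, avoids this class of guard failure.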
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_v.lora_A, 140531277950992) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_v.lora_A['default_0'], 140531278046656) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.lora_A['default_0'].weight, 
accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_v.lora_A['default_0'].weight, 140531238112848) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_v.lora_B, 140531277945184) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_v.lora_B['default_0'], 140531274888384) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_v.base_layer, 140581770788192) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_v.lora_dropout, 140531277954448) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_v.lora_dropout['default_0'], 140531277949408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[36].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[36].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[36].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[36].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[36].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[36].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[36].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.to_v._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[36].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
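
The to_v guard block above follows PEFT's LoRA dispatch one attribute access at a time. A minimal sketch of that path, assembled only from the source lines quoted in the guard comments and therefore an approximation of peft/tuners/lora/layer.py rather than its verbatim code:

    # Approximate reconstruction of the traced LoRA dispatch; each guarded
    # statement is quoted in the comments above (peft/tuners/lora/layer.py
    # :557-568). The loop header and the combine line are assumptions.
    def lora_linear_forward(layer, x, *args, **kwargs):
        result = layer.base_layer(x, *args, **kwargs)        # layer.py:557
        for active_adapter in layer.active_adapters:         # assumed iteration
            if active_adapter not in layer.lora_A.keys():    # layer.py:560
                continue
            lora_A = layer.lora_A[active_adapter]            # layer.py:562
            lora_B = layer.lora_B[active_adapter]            # layer.py:563
            dropout = layer.lora_dropout[active_adapter]     # layer.py:564
            scaling = layer.scaling[active_adapter]          # layer.py:565 (EQUALS_MATCH pins 1.0)
            x = x.to(lora_A.weight.dtype)                    # layer.py:566
            if not layer.use_dora[active_adapter]:           # layer.py:568
                result = result + lora_B(lora_A(dropout(x))) * scaling  # assumed combine
        return result

Note that scaling['default_0'] is pinned by EQUALS_MATCH to 1.0 rather than traced symbolically, so changing the adapter scale after compilation would fail this guard and force a recompile.
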
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.norm_k, 140581770788096) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[36].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[36].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.norm_k.weight, 140581766111904) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.norm_q, 140581770787952) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[36].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps)
| | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[36].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.norm_q.weight, 140581773261648) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.heads, accessed_by=DictGetItemGuardAccessor(heads)
| | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[36].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.processor, accessed_by=DictGetItemGuardAccessor(processor)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[36].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.processor, 140581770787856) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
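
Both attn.norm_q and attn.norm_k are guarded on values as well as identities: eps via EQUALS_MATCH == 1e-06 and the affine weight via ID_MATCH. A rough sketch of the RMSNorm forward those guards point at, assuming the standard variance computation around the two quoted lines (normalization.py:428/430):

    import torch

    # Rough sketch of the RMSNorm forward behind the norm_q/norm_k guards;
    # only the rsqrt line (:428) and the weight check (:430) are quoted in
    # the log. The variance line and dtype cast are standard-RMSNorm
    # assumptions, not quoted source.
    def rms_norm_forward(norm, hidden_states):
        variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)  # assumed
        hidden_states = hidden_states * torch.rsqrt(variance + norm.eps)   # :428, eps guarded == 1e-06
        if norm.weight is not None:                                        # :430, weight guarded by ID_MATCH
            hidden_states = hidden_states.to(norm.weight.dtype) * norm.weight  # assumed cast
        return hidden_states
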
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.forward, accessed_by=GetAttrGuardAccessor(forward)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.forward, accessed_by=FuncDefaultsGuardAccessor
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm, 140581770787520) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[36].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.norm, accessed_by=DictGetItemGuardAccessor(norm)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.norm, 140581770787664) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.silu, accessed_by=DictGetItemGuardAccessor(silu)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.silu, 140581770787568) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear, accessed_by=DictGetItemGuardAccessor(linear)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.linear, 140531277380720) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[36].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward
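
The norm.silu, norm.linear, and norm.norm guards all reference the same two statements, normalization.py:169 and :171, i.e. an AdaLayerNorm-style modulation in which a conditioning embedding is projected into shift/scale/gate terms. A sketch consistent with the quoted lines; the three-way chunk of the projected embedding is an assumption, since only the projection and the modulation appear in the log:

    # Sketch of the modulation traced through single_transformer_blocks[36].norm.
    def ada_norm_single_forward(mod, x, emb):
        emb = mod.linear(mod.silu(emb))                                   # normalization.py:169
        shift_msa, scale_msa, gate_msa = emb.chunk(3, dim=1)              # assumed split
        x = mod.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]   # normalization.py:171
        return x, gate_msa
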
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.linear.lora_A, 140531277365504) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.linear.lora_A['default_0'], 140531277491616) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.linear.lora_A['default_0'].weight, 140531238116288) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.linear.lora_B, 140531277365696) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.linear.lora_B['default_0'], 140531277492432) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.linear.base_layer, 140581770787616) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.linear.lora_dropout, 140531277380960) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.linear.lora_dropout['default_0'], 140531277381248) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[36].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[36].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[36].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[36].norm.linear.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[36].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[36].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[36].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[36].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
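
The merged_adapters / _disable_adapters / _active_adapter triple recurs under every LoRA-wrapped module because the traced forward consults three small PEFT accessors (tuners_utils.py:506/511/516, quoted above). A sketch reconstructed from those return statements only; the attribute setup is illustrative:

    # The three PEFT tuner-layer accessors these guards keep re-checking.
    class TunerLayerSketch:
        def __init__(self):
            self.merged_adapters = []              # guarded: TYPE_MATCH list + LENGTH_CHECK empty
            self._disable_adapters = False         # guarded: ID_MATCH on False
            self._active_adapter = ["default_0"]   # illustrative shared container

        @property
        def merged(self):
            return bool(self.merged_adapters)      # tuners_utils.py:506

        @property
        def disable_adapters(self):
            return self._disable_adapters          # tuners_utils.py:511

        @property
        def active_adapter(self):
            return self._active_adapter            # tuners_utils.py:516 -> TENSOR_ALIASING

The TENSOR_ALIASING entries assert that this layer's _active_adapter is the very same object as transformer_blocks[0].norm1.linear._active_adapter, i.e. one shared adapter-name container is re-guarded by identity under each layer.
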
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].act_mlp, 140581770787760) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_mlp, 140531277012016) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[36].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_mlp.lora_A, 140531276155632) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_mlp.lora_A['default_0'], 140531276161728) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_mlp.lora_A['default_0'].weight, 140531238114368) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_mlp.lora_B, 140531276161440) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_mlp.lora_B['default_0'], 140531276162544) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_mlp.base_layer, 140581770787712) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_mlp.lora_dropout, 140531276162352) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_mlp.lora_dropout['default_0'], 140531276155920) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[36].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[36].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[36].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[36].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[36].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[36].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[36].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[36].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
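
With proj_mlp covered, the remaining guards describe proj_out, the block's final projection. The quoted transformer_flux.py lines (:88, :89, :98) indicate how the guarded submodules compose; a skeleton under those quotes, with the unquoted steps marked as assumptions:

    import torch

    # Approximate skeleton of the single-block forward tying the guarded
    # submodules together; :88, :89 and :98 are quoted in the guards, the
    # attention call, concat and residual add are assumptions.
    def single_block_forward(block, hidden_states, temb, image_rotary_emb=None):
        residual = hidden_states
        norm_hidden_states, gate = block.norm(hidden_states, emb=temb)         # transformer_flux.py:88
        mlp_hidden_states = block.act_mlp(block.proj_mlp(norm_hidden_states))  # transformer_flux.py:89
        attn_output = block.attn(hidden_states=norm_hidden_states,             # assumed call
                                 image_rotary_emb=image_rotary_emb)
        hidden_states = torch.cat([attn_output, mlp_hidden_states], dim=2)     # assumed
        gate = gate.unsqueeze(1)                                               # assumed
        hidden_states = gate * block.proj_out(hidden_states)                   # transformer_flux.py:98
        return residual + hidden_states                                        # assumed
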
(self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_out.lora_A, 140531276155488) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_out.lora_A['default_0'], 140531276068992) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[36].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_out.lora_A['default_0'].weight, 140531238122368) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_out.lora_B, 140531276159520) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_out.lora_B['default_0'], 140531276061552) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- 
ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_out.base_layer, 140581770787808) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_out.lora_dropout, 140531276162928) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_out.lora_dropout['default_0'], 140531276163168) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[36].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[36].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[36].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[36].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[36].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[36].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[36].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[36].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[36].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[36]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37], accessed_by=GetItemGuardAccessor(37) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37], 140581770787472) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[37].__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].training, 140591004393440) # for index_block, block in enumerate(self.single_transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:509 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37]._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn, accessed_by=DictGetItemGuardAccessor(attn) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn, 140581770788672) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[37].attn.__dict__) # forward_call = 
(self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.training, 140591004393440) # attn_output = self.attn( # diffusers/src/diffusers/models/transformers/transformer_flux.py:91 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k, accessed_by=DictGetItemGuardAccessor(to_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_k, 140531273524144) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[37].attn.to_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_k.training, 140591004393408) # key = attn.to_k(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1717 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_k.lora_A, 140531273523520) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[37].attn.to_k.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_k.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_k.lora_A['default_0'], 140531273521984) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_k.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_k.lora_A['default_0'].weight, 140526665920848) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_k.lora_B, 140531273524336) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_k.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_k.lora_B['default_0'], 140531273522560) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_k.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_k.base_layer, 140581765087296) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_k.base_layer.training, 140591004393440) # result = 
self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_k.lora_dropout, 140531273524096) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_k.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_k.lora_dropout['default_0'], 140531273523616) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_k.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[37].attn.to_k.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[37].attn.to_k.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[37].attn.to_k.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[37].attn.to_k.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[37].attn.to_k.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_k.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[37].attn.to_k.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[37].attn.to_k.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged 
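The records above show the guard pattern torch.compile installs for every PEFT LoRA sub-layer: ID_MATCH pins each module and weight object, TYPE_MATCH and DICT_LENGTH cover the scaling and use_dora dicts, and an EQUALS_MATCH pins scaling['default_0'] to the literal 1.0 read at peft/tuners/lora/layer.py:565. Because the scale is a plain Python float consulted inside forward, Dynamo specializes the compiled graph on its exact value, so changing the LoRA scale after compiling fails that guard and forces a recompile. A minimal sketch of the same effect (the toy layer below is illustrative, not the real peft lora.Linear):

import torch

class ToyLoraLinear(torch.nn.Module):
    # Stand-in for a PEFT LoRA layer: what matters for guard purposes is
    # that `scaling` is a plain Python dict of floats read inside forward().
    def __init__(self, features=8, rank=4):
        super().__init__()
        self.base_layer = torch.nn.Linear(features, features)
        self.lora_A = torch.nn.Linear(features, rank, bias=False)
        self.lora_B = torch.nn.Linear(rank, features, bias=False)
        self.scaling = {"default_0": 1.0}  # guarded via EQUALS_MATCH == 1.0

    def forward(self, x):
        # Dynamo bakes this float into the graph as a constant, hence the
        # EQUALS_MATCH guard on scaling['default_0'] seen in the dump above.
        result = self.base_layer(x)
        return result + self.lora_B(self.lora_A(x)) * self.scaling["default_0"]

layer = torch.compile(ToyLoraLinear())
x = torch.randn(2, 8)
layer(x)                          # first call: trace, compile, install guards
layer(x)                          # guards pass, cached graph is reused
layer.scaling["default_0"] = 0.5  # EQUALS_MATCH against 1.0 now fails
layer(x)                          # Dynamo retraces and recompiles the frame

The ID_MATCH guards on use_dora['default_0'] and _disable_adapters work the same way but match object identity (here the False singleton) rather than a float value, so toggling DoRA or disabling adapters would likewise invalidate the cache.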
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_k._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_k._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[37].attn.to_k._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q, accessed_by=DictGetItemGuardAccessor(to_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_q, 140531273392688) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[37].attn.to_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH:
___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_q.training, 140591004393408) # query = attn.to_q(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1716 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_q.lora_A, 140531273394608) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_q.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_q.lora_A['default_0'], 140531273521696) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_q.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_q.lora_A['default_0'].weight, 140526665908608) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_q.lora_B, 140531273393024) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_q.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_q.lora_B['default_0'], 140531273522224) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_q.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.base_layer, 
accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_q.base_layer, 140581765087344) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_q.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_q.lora_dropout, 140531273390768) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_q.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_q.lora_dropout['default_0'], 140531273394368) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | 
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_q.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[37].attn.to_q.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[37].attn.to_q.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[37].attn.to_q.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[37].attn.to_q.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[37].attn.to_q.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_q.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q._forward_hooks, 
accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[37].attn.to_q.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[37].attn.to_q.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_q._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_q._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[37].attn.to_q._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v, accessed_by=DictGetItemGuardAccessor(to_v) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH:
___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_v, 140531273525008) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[37].attn.to_v.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_v.training, 140591004393408) # value = attn.to_v(hidden_states) # diffusers/src/diffusers/models/attention_processor.py:1718 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_v.lora_A, 140531273525536) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_v.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_v.lora_A['default_0'], 140531273527024) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 
140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_v.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_v.lora_A['default_0'].weight, 140526665910528) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_v.lora_B, 140531273526256) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_v.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_v.lora_B['default_0'], 140531273526880) # 
lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_v.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_v.base_layer, 140581765087392) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_v.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_v.lora_dropout, 140531273525152) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_v.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_v.lora_dropout['default_0'], 140531273525392) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_v.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[37].attn.to_v.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[37].attn.to_v.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[37].attn.to_v.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[37].attn.to_v.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # 
peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[37].attn.to_v.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_v.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[37].attn.to_v.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[37].attn.to_v.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.to_v._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.to_v._active_adapter, 
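
The to_q and to_v subtrees above guard every attribute read on the LoRA branch that Dynamo traced through peft/tuners/lora/layer.py:557-568 (per the inline source comments on each guard). A minimal sketch of that branch, assuming a single unmerged adapter keyed "default_0" with use_dora=False and zero dropout; the class is illustrative, not PEFT's real lora.Linear:

    import torch
    import torch.nn as nn

    class LoraLinearSketch(nn.Module):
        """Illustrative stand-in for PEFT's lora.Linear; not the real class."""
        def __init__(self, base: nn.Linear, r: int = 16, scaling: float = 1.0):
            super().__init__()
            self.base_layer = base
            self.lora_A = nn.ModuleDict({"default_0": nn.Linear(base.in_features, r, bias=False)})
            self.lora_B = nn.ModuleDict({"default_0": nn.Linear(r, base.out_features, bias=False)})
            self.lora_dropout = nn.ModuleDict({"default_0": nn.Identity()})  # dropout == 0
            self.scaling = {"default_0": scaling}     # guarded: EQUALS_MATCH == 1.0
            self.use_dora = {"default_0": False}      # guarded: ID_MATCH on False
            self.merged_adapters = []                 # guarded: LENGTH_CHECK (empty)
            self.active_adapters = ["default_0"]

        def forward(self, x):
            result = self.base_layer(x)                        # layer.py:557
            for active_adapter in self.active_adapters:
                if active_adapter not in self.lora_A.keys():   # layer.py:560
                    continue
                lora_A = self.lora_A[active_adapter]           # layer.py:562
                lora_B = self.lora_B[active_adapter]           # layer.py:563
                dropout = self.lora_dropout[active_adapter]    # layer.py:564
                scaling = self.scaling[active_adapter]         # layer.py:565
                x = x.to(lora_A.weight.dtype)                  # layer.py:566
                if not self.use_dora[active_adapter]:          # layer.py:568
                    result = result + lora_B(lora_A(dropout(x))) * scaling
            return result

    layer = LoraLinearSketch(nn.Linear(3072, 3072))
    out = layer(torch.randn(2, 3072))   # the same path the guards above protect

Each dict lookup and flag on this branch surfaces above as its own guard (TYPE_MATCH and DICT_LENGTH plus EQUALS_MATCH on scaling, ID_MATCH on use_dora['default_0'], LENGTH_CHECK on merged_adapters, and so on), repeated for every LoRA-wrapped projection, which is why the dump runs this long.
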
accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[37].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[37].attn.to_v._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_k, accessed_by=DictGetItemGuardAccessor(norm_k) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.norm_k, 140581770788816) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_k.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[37].attn.norm_k.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_k.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.norm_k.training, 140591004393440) # if attn.norm_k is not None: # diffusers/src/diffusers/models/attention_processor.py:1729 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_k.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[37].attn.norm_k.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_k._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_k.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 
torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.norm_k.weight, 140581766113424) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_k._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_k._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_k._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_k._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_q, accessed_by=DictGetItemGuardAccessor(norm_q) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.norm_q, 140581770788720) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_q.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[37].attn.norm_q.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_q.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.norm_q.training, 140591004393440) # if attn.norm_q is not None: # diffusers/src/diffusers/models/attention_processor.py:1727 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_q.eps, accessed_by=DictGetItemGuardAccessor(eps) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[37].attn.norm_q.eps == 1e-06 # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # 
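
norm_k and norm_q are the query/key RMSNorm layers; their guards pin eps == 1e-06 and the identity of weight, matching the quoted diffusers/src/diffusers/models/normalization.py:428 and :430. A sketch consistent with those two lines, with the dtype handling around them being illustrative:

    import torch
    import torch.nn as nn

    class RMSNormSketch(nn.Module):
        """Sketch matching the two quoted normalization.py lines; dtype details illustrative."""
        def __init__(self, dim: int, eps: float = 1e-6):
            super().__init__()
            self.eps = eps                                # guarded: EQUALS_MATCH == 1e-06
            self.weight = nn.Parameter(torch.ones(dim))   # guarded: ID_MATCH on the Parameter

        def forward(self, hidden_states):
            input_dtype = hidden_states.dtype
            variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)
            hidden_states = hidden_states * torch.rsqrt(variance + self.eps)  # normalization.py:428
            if self.weight is not None:                   # normalization.py:430
                hidden_states = hidden_states * self.weight
            return hidden_states.to(input_dtype)

    q = RMSNormSketch(128)(torch.randn(2, 24, 128))   # e.g. per-head query states
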
diffusers/src/diffusers/models/normalization.py:428 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_q._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_q.weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.norm_q.weight, 140581766113904) # if self.weight is not None: # diffusers/src/diffusers/models/normalization.py:430 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_q._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_q._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_q._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.norm_q._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.heads, accessed_by=DictGetItemGuardAccessor(heads) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[37].attn.heads == 24 # head_dim = inner_dim // attn.heads # diffusers/src/diffusers/models/attention_processor.py:1721 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.processor, accessed_by=DictGetItemGuardAccessor(processor) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[37].attn.processor, 93831581524080) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.processor, 140581770788624) # return self.processor( # diffusers/src/diffusers/models/attention_processor.py:490 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn._forward_hooks, 
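
The EQUALS_MATCH on attn.heads == 24 is what makes the head split quoted from attention_processor.py:1721 a compile-time constant. Assuming Flux's 3072-wide query projection (the width itself does not appear in this excerpt):

    inner_dim = 3072               # assumed projection width; not shown in this log
    heads = 24                     # guarded above: attn.heads == 24
    head_dim = inner_dim // heads  # the division from attention_processor.py:1721
    assert head_dim == 128
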
accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.forward, accessed_by=GetAttrGuardAccessor(forward) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.forward, accessed_by=FuncDefaultsGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].attn.forward.__defaults__[0], accessed_by=GetItemGuardAccessor(0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].attn.forward.__defaults__[0], 140591004478624) # batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape # diffusers/src/diffusers/models/attention_processor.py:1713 in __call__ V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm, 140581770788288) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[37].norm.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.training, 140591004393440) # norm_hidden_states, gate = self.norm(hidden_states, emb=temb) # 
diffusers/src/diffusers/models/transformers/transformer_flux.py:88 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.norm, accessed_by=DictGetItemGuardAccessor(norm) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.norm, 140581770788432) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.norm.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.norm.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.norm.training, 140591004393440) # x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] # diffusers/src/diffusers/models/normalization.py:171 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.silu, accessed_by=DictGetItemGuardAccessor(silu) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.silu, 140581770788336) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.silu.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.silu.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.silu.training, 140591004393440) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear, accessed_by=DictGetItemGuardAccessor(linear) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.linear, 140531274884784) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in 
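
The norm subtree traces the single-block modulation quoted from transformer_flux.py:88 and normalization.py:169/171. A sketch of that module under the usual AdaLayerNormZero-style layout; the 3-way chunk into shift/scale/gate is an assumption consistent with the quoted lines:

    import torch
    import torch.nn as nn

    class AdaLayerNormZeroSingleSketch(nn.Module):
        """Sketch of the single-block modulation; the 3-way chunk is an assumption."""
        def __init__(self, dim: int):
            super().__init__()
            self.silu = nn.SiLU()
            self.linear = nn.Linear(dim, 3 * dim)   # LoRA-wrapped in the log (norm.linear)
            self.norm = nn.LayerNorm(dim, elementwise_affine=False, eps=1e-6)

        def forward(self, x, emb):
            emb = self.linear(self.silu(emb))                    # normalization.py:169
            shift_msa, scale_msa, gate_msa = emb.chunk(3, dim=1)
            x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]  # normalization.py:171
            return x, gate_msa

    mod = AdaLayerNormZeroSingleSketch(64)
    norm_hidden_states, gate = mod(torch.randn(2, 10, 64), torch.randn(2, 64))

Note that norm.linear is itself a PEFT lora.Linear here: the lora_A/lora_B guards that follow below are the same full set already seen on to_q and to_v, now applied to the modulation projection.
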
forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[37].norm.linear.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.linear.training, 140591004393408) # emb = self.linear(self.silu(emb)) # diffusers/src/diffusers/models/normalization.py:169 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.linear.lora_A, 140531274881280) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.linear.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.linear.lora_A['default_0'], 140531275379088) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[37].norm.linear.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.linear.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.linear.lora_A['default_0'].weight, 140531238119328) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.linear.lora_B, 140531274887712) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.lora_B.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.linear.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.linear.lora_B['default_0'], 140531275374624) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 
14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.linear.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.linear.base_layer, 140581770788384) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.base_layer.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.linear.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.linear.lora_dropout, 140531274880320) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[37].norm.linear.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.linear.lora_dropout['default_0'], 140531274879600) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.linear.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.scaling, accessed_by=DictGetItemGuardAccessor(scaling) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[37].norm.linear.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[37].norm.linear.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[37].norm.linear.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[37].norm.linear.use_dora, 140591004466944) # if not 
self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[37].norm.linear.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.linear.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[37].norm.linear.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[37].norm.linear.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].norm.linear._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm.linear._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: 
source=L['self'].single_transformer_blocks[37].norm.linear._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[37].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[37].norm.linear._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].norm._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].act_mlp, accessed_by=DictGetItemGuardAccessor(act_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].act_mlp, 140581770788528) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].act_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].act_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].act_mlp.training, 140591004393440) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp, accessed_by=DictGetItemGuardAccessor(proj_mlp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- ID_MATCH: 
___check_obj_id(L['self'].single_transformer_blocks[37].proj_mlp, 140531275379856) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[37].proj_mlp.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_mlp.training, 140591004393408) # mlp_hidden_states = self.act_mlp(self.proj_mlp(norm_hidden_states)) # diffusers/src/diffusers/models/transformers/transformer_flux.py:89 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp._modules, accessed_by=DictGetItemGuardAccessor(_modules) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_mlp.lora_A, 140531273395616) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.lora_A.training, accessed_by=DictGetItemGuardAccessor(training) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_mlp.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_mlp.lora_A['default_0'], 140531273395808) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward V0909 14:45:31.053000 
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_mlp.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_mlp.lora_A['default_0'].weight, 140531238118928) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_mlp.lora_B, 140531273389472) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_mlp.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_mlp.lora_B['default_0'], 140531273392880) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_mlp.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_mlp.base_layer, 140581770788480) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_mlp.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_mlp.lora_dropout, 140531275381104) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_mlp.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_mlp.lora_dropout['default_0'], 140531275376256) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_mlp.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[37].proj_mlp.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[37].proj_mlp.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[37].proj_mlp.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[37].proj_mlp.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[37].proj_mlp.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_mlp.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[37].proj_mlp.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[37].proj_mlp.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_mlp._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_mlp._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[37].proj_mlp._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
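Note the EQUALS_MATCH on scaling['default_0'] == 1.0 and the LENGTH_CHECK that merged_adapters is empty: the compiled graph bakes in the current LoRA scale and the unmerged adapter state, so changing either invalidates this cache entry on the next call. A quick way to observe that, reusing the hypothetical LoraLinearSketch above:

import torch

torch._logging.set_logs(recompiles=True)   # report which guard failed when re-tracing

module = LoraLinearSketch(torch.nn.Linear(64, 64))
compiled = torch.compile(module)
x = torch.randn(2, 64)
compiled(x)                                # first call: trace + a guard tree like this one
module.scaling["default_0"] = 0.5          # breaks EQUALS_MATCH: scaling['default_0'] == 1.0
compiled(x)                                # guard failure -> recompilation is logged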
| | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out, accessed_by=DictGetItemGuardAccessor(proj_out)
| | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_out, 140531273395568) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | +- DICT_CONTAINS: not ___dict_contains('forward', L['self'].single_transformer_blocks[37].proj_out.__dict__) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_out.training, 140591004393408) # hidden_states = gate * self.proj_out(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:98 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out._modules, accessed_by=DictGetItemGuardAccessor(_modules)
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.lora_A, accessed_by=DictGetItemGuardAccessor(lora_A)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_out.lora_A, 140531273396240) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.lora_A.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.lora_A.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_out.lora_A.training, 140591004393408) # if active_adapter not in self.lora_A.keys(): # peft/tuners/lora/layer.py:560 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.lora_A['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_out.lora_A['default_0'], 140531273389376) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.lora_A['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.lora_A['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_out.lora_A['default_0'].training, 140591004393408) # lora_A = self.lora_A[active_adapter] # peft/tuners/lora/layer.py:562 in forward
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.lora_A['default_0']._parameters, accessed_by=DictGetItemGuardAccessor(_parameters)
| | | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.lora_A['default_0'].weight, accessed_by=DictGetItemGuardAccessor(weight)
| | | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_out.lora_A['default_0'].weight, 140526665924128) # x = x.to(lora_A.weight.dtype) # peft/tuners/lora/layer.py:566 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.lora_B, accessed_by=DictGetItemGuardAccessor(lora_B)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_out.lora_B, 140531273385104) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.lora_B.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.lora_B.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_out.lora_B.training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.lora_B['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_out.lora_B['default_0'], 140531273387552) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.lora_B['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.lora_B['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_out.lora_B['default_0'].training, 140591004393408) # lora_B = self.lora_B[active_adapter] # peft/tuners/lora/layer.py:563 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.base_layer, accessed_by=DictGetItemGuardAccessor(base_layer)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_out.base_layer, 140581770788576) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.base_layer.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.base_layer.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_out.base_layer.training, 140591004393440) # result = self.base_layer(x, *args, **kwargs) # peft/tuners/lora/layer.py:557 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.lora_dropout, accessed_by=DictGetItemGuardAccessor(lora_dropout)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_out.lora_dropout, 140531273389808) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.lora_dropout.__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.lora_dropout.training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_out.lora_dropout.training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.lora_dropout['default_0'], accessed_by=GetItemGuardAccessor(default_0)
| | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_out.lora_dropout['default_0'], 140531273397344) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.lora_dropout['default_0'].__dict__, accessed_by=GetGenericDictGuardAccessor
| | | | | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.lora_dropout['default_0'].training, accessed_by=DictGetItemGuardAccessor(training)
| | | | | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_out.lora_dropout['default_0'].training, 140591004393408) # dropout = self.lora_dropout[active_adapter] # peft/tuners/lora/layer.py:564 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.scaling, accessed_by=DictGetItemGuardAccessor(scaling)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[37].proj_out.scaling, 140591004466944) # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[37].proj_out.scaling) == 1 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.scaling['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- EQUALS_MATCH: L['self'].single_transformer_blocks[37].proj_out.scaling['default_0'] == 1.0 # scaling = self.scaling[active_adapter] # peft/tuners/lora/layer.py:565 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.use_dora, accessed_by=DictGetItemGuardAccessor(use_dora)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[37].proj_out.use_dora, 140591004466944) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- DICT_LENGTH: len(L['self'].single_transformer_blocks[37].proj_out.use_dora) == 1 # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.use_dora['default_0'], accessed_by=DictGetItemGuardAccessor(default_0)
| | | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_out.use_dora['default_0'], 140591004393440) # if not self.use_dora[active_adapter]: # peft/tuners/lora/layer.py:568 in forward
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out.merged_adapters, accessed_by=DictGetItemGuardAccessor(merged_adapters)
| | | | | | | | | | | +- TYPE_MATCH: ___check_type_id(L['self'].single_transformer_blocks[37].proj_out.merged_adapters, 140591004458752) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | | +- LENGTH_CHECK: not L['self'].single_transformer_blocks[37].proj_out.merged_adapters # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out._disable_adapters, accessed_by=DictGetItemGuardAccessor(_disable_adapters)
| | | | | | | | | | | +- ID_MATCH: ___check_obj_id(L['self'].single_transformer_blocks[37].proj_out._disable_adapters, 140591004393440) # return self._disable_adapters # peft/tuners/tuners_utils.py:511 in disable_adapters
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
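These per-adapter guards repeat for every wrapped Linear in all of the model's transformer_blocks and single_transformer_blocks (this dump is at block index 37), which is why the tree is this long. If the adapter is fixed for the whole session, one way to shed the PEFT wrapper guards entirely is to fuse the adapter into the base weights before compiling, so the traced modules are plain nn.Linear. This follows the recipe diffusers documents for combining LoRA with torch.compile; treat the exact call sequence as version-dependent, and `pipe` here is assumed to be a FluxPipeline with LoRA weights already loaded:

import torch

pipe.fuse_lora()             # fold lora_B @ lora_A * scaling into the base weights
pipe.unload_lora_weights()   # drop the peft wrapper modules from the model
pipe.transformer = torch.compile(pipe.transformer)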
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| | | | | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37].proj_out._active_adapter, accessed_by=DictGetItemGuardAccessor(_active_adapter)
| | | | | | | | | | | +- TENSOR_ALIASING: L['self'].transformer_blocks[0].norm1.linear._active_adapter is L['self'].single_transformer_blocks[37].proj_out._active_adapter # return self._active_adapter # peft/tuners/tuners_utils.py:516 in active_adapter
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37]._forward_hooks, accessed_by=DictGetItemGuardAccessor(_forward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37]._backward_hooks, accessed_by=DictGetItemGuardAccessor(_backward_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37]._forward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_forward_pre_hooks)
| | | | | | | +- GuardManager: source=L['self'].single_transformer_blocks[37]._backward_pre_hooks, accessed_by=DictGetItemGuardAccessor(_backward_pre_hooks)
| +- GuardManager: source=L['img_ids'], accessed_by=DictGetItemGuardAccessor(img_ids)
| | +- TENSOR_MATCH: check_tensor(L['img_ids'], Tensor, DispatchKeySet(CUDA, BackendSelect, ADInplaceOrView, AutogradCUDA), torch.bfloat16, device=0, requires_grad=False, size=[4096, 3], stride=[3, 1]) # if img_ids.ndim == 3: # diffusers/src/diffusers/models/transformers/transformer_flux.py:462 in forward
| | +- NO_HASATTR: hasattr(L['img_ids'], '_dynamo_dynamic_indices') == False # if img_ids.ndim == 3: # diffusers/src/diffusers/models/transformers/transformer_flux.py:462 in forward
| | +- NO_TENSOR_ALIASING: check_no_aliasing(L['img_ids'], L['txt_ids'], L['guidance'], L['timestep'], L['hidden_states'], L['pooled_projections'], L['encoder_hidden_states'])
| +- GuardManager: source=L['txt_ids'], accessed_by=DictGetItemGuardAccessor(txt_ids)
| | +- TENSOR_MATCH: check_tensor(L['txt_ids'], Tensor, DispatchKeySet(CUDA, BackendSelect, ADInplaceOrView, AutogradCUDA), torch.bfloat16, device=0, requires_grad=False, size=[512, 3], stride=[3, 1]) # if txt_ids.ndim == 3: # diffusers/src/diffusers/models/transformers/transformer_flux.py:456 in forward
| | +- NO_HASATTR: hasattr(L['txt_ids'], '_dynamo_dynamic_indices') == False # if txt_ids.ndim == 3: # diffusers/src/diffusers/models/transformers/transformer_flux.py:456 in forward
| | +- NO_TENSOR_ALIASING: check_no_aliasing(L['img_ids'], L['txt_ids'], L['guidance'], L['timestep'], L['hidden_states'], L['pooled_projections'], L['encoder_hidden_states'])
| +- GuardManager: source=L['guidance'], accessed_by=DictGetItemGuardAccessor(guidance)
| | +- TENSOR_MATCH: check_tensor(L['guidance'], Tensor, DispatchKeySet(CUDA, BackendSelect, ADInplaceOrView, AutogradCUDA), torch.float32, device=0, requires_grad=False, size=[1], stride=[1]) # if guidance is not None: # diffusers/src/diffusers/models/transformers/transformer_flux.py:445 in forward
| | +- NO_HASATTR: hasattr(L['guidance'], '_dynamo_dynamic_indices') == False # if guidance is not None: # diffusers/src/diffusers/models/transformers/transformer_flux.py:445 in forward
| | +- NO_TENSOR_ALIASING: check_no_aliasing(L['img_ids'], L['txt_ids'], L['guidance'], L['timestep'], L['hidden_states'], L['pooled_projections'], L['encoder_hidden_states'])
| +- GuardManager: source=L['timestep'], accessed_by=DictGetItemGuardAccessor(timestep)
| | +- TENSOR_MATCH: check_tensor(L['timestep'], Tensor, DispatchKeySet(CUDA, BackendSelect, ADInplaceOrView, AutogradCUDA), torch.bfloat16, device=0, requires_grad=False, size=[1], stride=[1]) # timestep = timestep.to(hidden_states.dtype) * 1000 # diffusers/src/diffusers/models/transformers/transformer_flux.py:444 in forward
| | +- NO_HASATTR: hasattr(L['timestep'], '_dynamo_dynamic_indices') == False # timestep = timestep.to(hidden_states.dtype) * 1000 # diffusers/src/diffusers/models/transformers/transformer_flux.py:444 in forward
| | +- NO_TENSOR_ALIASING: check_no_aliasing(L['img_ids'], L['txt_ids'], L['guidance'], L['timestep'], L['hidden_states'], L['pooled_projections'], L['encoder_hidden_states'])
| +- GuardManager: source=L['return_dict'], accessed_by=DictGetItemGuardAccessor(return_dict)
| | +- ID_MATCH: ___check_obj_id(L['return_dict'], 140591004393440) # if not return_dict: # diffusers/src/diffusers/models/transformers/transformer_flux.py:555 in forward
| +- GuardManager: source=L['hidden_states'], accessed_by=DictGetItemGuardAccessor(hidden_states)
| | +- TENSOR_MATCH: check_tensor(L['hidden_states'], Tensor, DispatchKeySet(CUDA, BackendSelect, ADInplaceOrView, AutogradCUDA), torch.bfloat16, device=0, requires_grad=False, size=[1, 4096, 64], stride=[262144, 64, 1]) # hidden_states = self.x_embedder(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:442 in forward
| | +- NO_HASATTR: hasattr(L['hidden_states'], '_dynamo_dynamic_indices') == False # hidden_states = self.x_embedder(hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:442 in forward
| | +- NO_TENSOR_ALIASING: check_no_aliasing(L['img_ids'], L['txt_ids'], L['guidance'], L['timestep'], L['hidden_states'], L['pooled_projections'], L['encoder_hidden_states'])
| +- GuardManager: source=L['pooled_projections'], accessed_by=DictGetItemGuardAccessor(pooled_projections)
| | +- TENSOR_MATCH: check_tensor(L['pooled_projections'], Tensor, DispatchKeySet(CUDA, BackendSelect, ADInplaceOrView, AutogradCUDA), torch.bfloat16, device=0, requires_grad=False, size=[1, 768], stride=[768, 1]) # timesteps_emb = self.timestep_embedder(timesteps_proj.to(dtype=pooled_projection.dtype)) # (N, D) # diffusers/src/diffusers/models/embeddings.py:1060 in forward
| | +- NO_HASATTR: hasattr(L['pooled_projections'], '_dynamo_dynamic_indices') == False # timesteps_emb = self.timestep_embedder(timesteps_proj.to(dtype=pooled_projection.dtype)) # (N, D) # diffusers/src/diffusers/models/embeddings.py:1060 in forward
| | +- NO_TENSOR_ALIASING: check_no_aliasing(L['img_ids'], L['txt_ids'], L['guidance'], L['timestep'], L['hidden_states'], L['pooled_projections'], L['encoder_hidden_states'])
| +- GuardManager: source=L['encoder_hidden_states'], accessed_by=DictGetItemGuardAccessor(encoder_hidden_states)
| | +- TENSOR_MATCH: check_tensor(L['encoder_hidden_states'], Tensor, DispatchKeySet(CUDA, BackendSelect, ADInplaceOrView, AutogradCUDA), torch.bfloat16, device=0, requires_grad=False, size=[1, 512, 4096], stride=[2097152, 4096, 1]) # encoder_hidden_states = self.context_embedder(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:454 in forward
| | +- NO_HASATTR: hasattr(L['encoder_hidden_states'], '_dynamo_dynamic_indices') == False # encoder_hidden_states = self.context_embedder(encoder_hidden_states) # diffusers/src/diffusers/models/transformers/transformer_flux.py:454 in forward
| | +- NO_TENSOR_ALIASING: check_no_aliasing(L['img_ids'], L['txt_ids'], L['guidance'], L['timestep'], L['hidden_states'], L['pooled_projections'], L['encoder_hidden_states'])
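The TENSOR_MATCH guards pin exact static shapes, dtype, and device for every input: hidden_states at [1, 4096, 64] (4096 image tokens), encoder_hidden_states at [1, 512, 4096] (512 text tokens), all bfloat16 on device 0. A different resolution, batch size, or prompt length therefore fails the guard and forces a recompile. Marking a dimension dynamic before the first compiled call asks dynamo to trace it symbolically instead; a self-contained sketch of the mechanism (not the Flux call itself):

import torch

def scale(x):
    return x * 2.0

compiled = torch.compile(scale)

x = torch.randn(1, 4096, 64)
torch._dynamo.mark_dynamic(x, 1)     # treat the token dimension as symbolic
compiled(x)                          # traced with a range guard on dim 1, not == 4096
compiled(torch.randn(1, 1024, 64))   # served by the same dynamic graph, no recompile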
| +- GuardManager: source=L['joint_attention_kwargs'], accessed_by=DictGetItemGuardAccessor(joint_attention_kwargs)
| | +- ID_MATCH: ___check_obj_id(L['joint_attention_kwargs'], 140591004478624) # if joint_attention_kwargs is not None: # diffusers/src/diffusers/models/transformers/transformer_flux.py:428 in forward
| +- GuardManager: source=L['controlnet_block_samples'], accessed_by=DictGetItemGuardAccessor(controlnet_block_samples)
| | +- ID_MATCH: ___check_obj_id(L['controlnet_block_samples'], 140591004478624) # if controlnet_block_samples is not None: # diffusers/src/diffusers/models/transformers/transformer_flux.py:502 in forward
| +- GuardManager: source=L['controlnet_single_block_samples'], accessed_by=DictGetItemGuardAccessor(controlnet_single_block_samples)
| | +- ID_MATCH: ___check_obj_id(L['controlnet_single_block_samples'], 140591004478624) # if controlnet_single_block_samples is not None: # diffusers/src/diffusers/models/transformers/transformer_flux.py:538 in forward
| +- GuardManager: source=G, accessed_by=GlobalsGuardAccessor
| | +- GuardManager: source=G['torch'], accessed_by=DictGetItemGuardAccessor(torch)
| | | +- ID_MATCH: ___check_obj_id(G['torch'], 140590979095808) # ids = torch.cat((txt_ids, img_ids), dim=0) # diffusers/src/diffusers/models/transformers/transformer_flux.py:468 in forward
| | | +- GuardManager: source=G['torch'].cat, accessed_by=GetAttrGuardAccessor(cat)
| | | | +- ID_MATCH: ___check_obj_id(G['torch'].cat, 140590976095136) # ids = torch.cat((txt_ids, img_ids), dim=0) # diffusers/src/diffusers/models/transformers/transformer_flux.py:468 in forward
| | | +- GuardManager: source=G['torch'].float16, accessed_by=GetAttrGuardAccessor(float16)
| | | | +- EQUALS_MATCH: G['torch'].float16 == torch.float16 # if encoder_hidden_states.dtype == torch.float16: # diffusers/src/diffusers/models/transformers/transformer_flux.py:200 in forward
| | +- GuardManager: source=G['USE_PEFT_BACKEND'], accessed_by=DictGetItemGuardAccessor(USE_PEFT_BACKEND)
| | | +- ID_MATCH: ___check_obj_id(G['USE_PEFT_BACKEND'], 140591004393408) # if USE_PEFT_BACKEND: # diffusers/src/diffusers/models/transformers/transformer_flux.py:434 in forward
| | +- GuardManager: source=G['scale_lora_layers'], accessed_by=DictGetItemGuardAccessor(scale_lora_layers)
| | | +- GuardManager: source=G['scale_lora_layers'].__code__, accessed_by=GetAttrGuardAccessor(__code__)
| | | | +- ID_MATCH: ___check_obj_id(G['scale_lora_layers'].__code__, 140585209572752) # scale_lora_layers(self, lora_scale) # diffusers/src/diffusers/models/transformers/transformer_flux.py:436 in forward
| | +- GuardManager: source=G['unscale_lora_layers'], accessed_by=DictGetItemGuardAccessor(unscale_lora_layers)
| | | +- GuardManager: source=G['unscale_lora_layers'].__code__, accessed_by=GetAttrGuardAccessor(__code__)
| | | | +- ID_MATCH: ___check_obj_id(G['unscale_lora_layers'].__code__, 140585209572928) # unscale_lora_layers(self, lora_scale) # diffusers/src/diffusers/models/transformers/transformer_flux.py:553 in forward
| | +- GuardManager: source=G['__builtins_dict___6'], accessed_by=DictGetItemGuardAccessor(__builtins_dict___6)
| | | +- GuardManager: source=G['__builtins_dict___6']['int'], accessed_by=DictGetItemGuardAccessor(int)
| | | | +- ID_MATCH: ___check_obj_id(G['__builtins_dict___6']['int'], 140591004461248) # if isinstance(pos, int): # diffusers/src/diffusers/models/embeddings.py:605 in get_1d_rotary_pos_embed
| | | +- GuardManager: source=G['__builtins_dict___6']['len'], accessed_by=DictGetItemGuardAccessor(len)
| | | | +- ID_MATCH: ___check_obj_id(G['__builtins_dict___6']['len'], 140590981894672) # assert len(timesteps.shape) == 1, "Timesteps should be a 1d-array" # diffusers/src/diffusers/models/embeddings.py:54 in get_timestep_embedding
| | | +- GuardManager: source=G['__builtins_dict___6']['set'], accessed_by=DictGetItemGuardAccessor(set)
| | | | +- ID_MATCH: ___check_obj_id(G['__builtins_dict___6']['set'], 140591004484896) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
| | | +- GuardManager: source=G['__builtins_dict___6']['str'], accessed_by=DictGetItemGuardAccessor(str)
| | | | +- ID_MATCH: ___check_obj_id(G['__builtins_dict___6']['str'], 140591004503168) # if isinstance(self.active_adapter, str): # peft/tuners/tuners_utils.py:530 in active_adapters
| | | +- GuardManager: source=G['__builtins_dict___6']['bool'], accessed_by=DictGetItemGuardAccessor(bool)
| | | | +- ID_MATCH: ___check_obj_id(G['__builtins_dict___6']['bool'], 140591004393472) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | +- GuardManager: source=G['__builtins_dict___6']['range'], accessed_by=DictGetItemGuardAccessor(range)
| | | | +- ID_MATCH: ___check_obj_id(G['__builtins_dict___6']['range'], 140591004481376) # for i in range(n_axes): # diffusers/src/diffusers/models/embeddings.py:696 in forward
| | | +- GuardManager: source=G['__builtins_dict___6']['enumerate'], accessed_by=DictGetItemGuardAccessor(enumerate)
| | | | +- ID_MATCH: ___check_obj_id(G['__builtins_dict___6']['enumerate'], 140591004413056) # for index_block, block in enumerate(self.transformer_blocks): # diffusers/src/diffusers/models/transformers/transformer_flux.py:471 in forward
| | | +- GuardManager: source=G['__builtins_dict___6']['isinstance'], accessed_by=DictGetItemGuardAccessor(isinstance)
| | | | +- ID_MATCH: ___check_obj_id(G['__builtins_dict___6']['isinstance'], 140590981894352) # if isinstance(pos, int): # diffusers/src/diffusers/models/embeddings.py:605 in get_1d_rotary_pos_embed
| | +- GuardManager: source=G['__import_peft_dot_tuners_dot_tuners_utils'], accessed_by=DictGetItemGuardAccessor(__import_peft_dot_tuners_dot_tuners_utils)
| | | +- ID_MATCH: ___check_obj_id(G['__import_peft_dot_tuners_dot_tuners_utils'], 140585265503648) # return bool(self.merged_adapters) # peft/tuners/tuners_utils.py:506 in merged
| | | +- GuardManager: source=G['__import_peft_dot_tuners_dot_tuners_utils'].BaseTunerLayer, accessed_by=GetAttrGuardAccessor(BaseTunerLayer)
| | | | +- ID_MATCH: ___check_obj_id(G['__import_peft_dot_tuners_dot_tuners_utils'].BaseTunerLayer, 93831560473968) # from peft.tuners.tuners_utils import BaseTunerLayer # diffusers/src/diffusers/utils/peft_utils.py:113 in scale_lora_layers
| | +- GuardManager: source=G['__import_diffusers_dot_models_dot_attention'], accessed_by=DictGetItemGuardAccessor(__import_diffusers_dot_models_dot_attention)
| | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_attention'], 140585077988688) # if len(args) > 0 or kwargs.get("scale", None) is not None: # diffusers/src/diffusers/models/attention.py:1197 in forward
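The block above pins module-level state rather than tensors: ID_MATCH on the None value of the optional kwargs (so passing joint_attention_kwargs or ControlNet residuals later would recompile), on the USE_PEFT_BACKEND flag, and on the identity of every builtin and imported module the traced code touches. A dump like this section is produced by enabling the guards logging artifact before compiling, equivalent to running with TORCH_LOGS="guards,recompiles" in the environment:

import torch

torch._logging.set_logs(guards=True, recompiles=True)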
source=G['__import_diffusers_dot_models_dot_embeddings'], accessed_by=DictGetItemGuardAccessor(__import_diffusers_dot_models_dot_embeddings) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'], 140585079518960) # t_emb = get_timestep_embedding( # diffusers/src/diffusers/models/embeddings.py:764 in forward V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].np, accessed_by=GetAttrGuardAccessor(np) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].np, 140590976137424) # if isinstance(pos, np.ndarray): # diffusers/src/diffusers/models/embeddings.py:607 in get_1d_rotary_pos_embed V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].np.ndarray, accessed_by=GetAttrGuardAccessor(ndarray) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].np.ndarray, 140588028923008) # if isinstance(pos, np.ndarray): # diffusers/src/diffusers/models/embeddings.py:607 in get_1d_rotary_pos_embed V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].math, accessed_by=GetAttrGuardAccessor(math) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].math, 140590979232480) # exponent = -math.log(max_period) * torch.arange( # diffusers/src/diffusers/models/embeddings.py:57 in get_timestep_embedding V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].math.log, accessed_by=GetAttrGuardAccessor(log) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].math.log, 140590979235360) # exponent = -math.log(max_period) * torch.arange( # diffusers/src/diffusers/models/embeddings.py:57 in get_timestep_embedding V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].torch, accessed_by=GetAttrGuardAccessor(torch) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].torch, 140590979095808) # exponent = -math.log(max_period) * torch.arange( # diffusers/src/diffusers/models/embeddings.py:57 in get_timestep_embedding V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].torch.cat, accessed_by=GetAttrGuardAccessor(cat) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].torch.cat, 140590976095136) # emb = torch.cat([torch.sin(emb), torch.cos(emb)], 
dim=-1) # diffusers/src/diffusers/models/embeddings.py:69 in get_timestep_embedding V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].torch.cos, accessed_by=GetAttrGuardAccessor(cos) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].torch.cos, 140590976096336) # emb = torch.cat([torch.sin(emb), torch.cos(emb)], dim=-1) # diffusers/src/diffusers/models/embeddings.py:69 in get_timestep_embedding V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].torch.exp, accessed_by=GetAttrGuardAccessor(exp) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].torch.exp, 140590976097696) # emb = torch.exp(exponent) # diffusers/src/diffusers/models/embeddings.py:62 in get_timestep_embedding V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].torch.sin, accessed_by=GetAttrGuardAccessor(sin) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].torch.sin, 140590976106096) # emb = torch.cat([torch.sin(emb), torch.cos(emb)], dim=-1) # diffusers/src/diffusers/models/embeddings.py:69 in get_timestep_embedding V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].torch.outer, accessed_by=GetAttrGuardAccessor(outer) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].torch.outer, 140590976134544) # freqs = torch.outer(pos, freqs) # type: ignore # [S, D/2] # diffusers/src/diffusers/models/embeddings.py:616 in get_1d_rotary_pos_embed V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].torch.stack, accessed_by=GetAttrGuardAccessor(stack) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].torch.stack, 140590976059488) # x_rotated = torch.stack([-x_imag, x_real], dim=-1).flatten(3) # diffusers/src/diffusers/models/embeddings.py:662 in apply_rotary_emb V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].torch.arange, accessed_by=GetAttrGuardAccessor(arange) V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].torch.arange, 140590975983808) # exponent = -math.log(max_period) * torch.arange( # diffusers/src/diffusers/models/embeddings.py:57 in get_timestep_embedding V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- GuardManager: 
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].torch.float32, accessed_by=GetAttrGuardAccessor(float32)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- EQUALS_MATCH: G['__import_diffusers_dot_models_dot_embeddings'].torch.float32 == torch.float32 # start=0, end=half_dim, dtype=torch.float32, device=timesteps.device # diffusers/src/diffusers/models/embeddings.py:58 in get_timestep_embedding
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].torch.float64, accessed_by=GetAttrGuardAccessor(float64)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- EQUALS_MATCH: G['__import_diffusers_dot_models_dot_embeddings'].torch.float64 == torch.float64 # freqs_dtype = torch.float32 if is_mps else torch.float64 # diffusers/src/diffusers/models/embeddings.py:695 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].apply_rotary_emb, accessed_by=GetAttrGuardAccessor(apply_rotary_emb)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].apply_rotary_emb.__code__, accessed_by=GetAttrGuardAccessor(__code__)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].apply_rotary_emb.__code__, 140585079325072) # from .embeddings import apply_rotary_emb # diffusers/src/diffusers/models/attention_processor.py:1760 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].apply_rotary_emb, accessed_by=FuncDefaultsGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].apply_rotary_emb.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].apply_rotary_emb.__defaults__[0], 140591004393408) # if use_real: # diffusers/src/diffusers/models/embeddings.py:653 in apply_rotary_emb
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].apply_rotary_emb.__defaults__[1], accessed_by=GetItemGuardAccessor(1)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- EQUALS_MATCH: G['__import_diffusers_dot_models_dot_embeddings'].apply_rotary_emb.__defaults__[1] == -1 # if use_real_unbind_dim == -1: # diffusers/src/diffusers/models/embeddings.py:659 in apply_rotary_emb
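
Beyond module attributes, Dynamo guards apply_rotary_emb through its __code__ object and, via FuncDefaultsGuardAccessor, through its default arguments: __defaults__[0] is id-matched (use_real, per the quoted branch at embeddings.py:653) and __defaults__[1] must equal -1 (use_real_unbind_dim, embeddings.py:659). A hedged sketch of the guarded branch, built around the one line the log quotes verbatim (embeddings.py:662); the reshape and broadcast details are assumptions.

import torch

def apply_rotary_emb_sketch(x, freqs_cis, use_real=True, use_real_unbind_dim=-1):
    # x assumed [batch, heads, seq, head_dim]; freqs_cis assumed a (cos, sin) pair
    if use_real:
        cos, sin = freqs_cis
        cos, sin = cos[None, None], sin[None, None]  # assumed broadcast shape
        if use_real_unbind_dim == -1:
            # split head_dim into interleaved (real, imag) pairs -- assumed layout
            x_real, x_imag = x.reshape(*x.shape[:-1], -1, 2).unbind(-1)
            # embeddings.py:662, quoted verbatim in the torch.stack guard
            x_rotated = torch.stack([-x_imag, x_real], dim=-1).flatten(3)
        else:
            raise NotImplementedError("only the guarded -1 branch is sketched")
        return (x.float() * cos + x_rotated.float() * sin).to(x.dtype)
    raise NotImplementedError("complex path not covered by these guards")

Changing either default in the installed diffusers copy would fail the guard and recompile, which is the point: the traced graph baked in exactly this branch.
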
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].get_timestep_embedding, accessed_by=GetAttrGuardAccessor(get_timestep_embedding)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].get_timestep_embedding.__code__, accessed_by=GetAttrGuardAccessor(__code__)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].get_timestep_embedding.__code__, 140585079245968) # t_emb = get_timestep_embedding( # diffusers/src/diffusers/models/embeddings.py:764 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].get_timestep_embedding, accessed_by=FuncDefaultsGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].get_timestep_embedding.__defaults__[3], accessed_by=GetItemGuardAccessor(3)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- EQUALS_MATCH: G['__import_diffusers_dot_models_dot_embeddings'].get_timestep_embedding.__defaults__[3] == 10000 # exponent = -math.log(max_period) * torch.arange( # diffusers/src/diffusers/models/embeddings.py:57 in get_timestep_embedding
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].get_1d_rotary_pos_embed, accessed_by=GetAttrGuardAccessor(get_1d_rotary_pos_embed)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].get_1d_rotary_pos_embed.__code__, accessed_by=GetAttrGuardAccessor(__code__)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_embeddings'].get_1d_rotary_pos_embed.__code__, 140585079258816) # cos, sin = get_1d_rotary_pos_embed( # diffusers/src/diffusers/models/embeddings.py:697 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].get_1d_rotary_pos_embed, accessed_by=FuncDefaultsGuardAccessor
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].get_1d_rotary_pos_embed.__defaults__[0], accessed_by=GetItemGuardAccessor(0)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- EQUALS_MATCH: G['__import_diffusers_dot_models_dot_embeddings'].get_1d_rotary_pos_embed.__defaults__[0] == 10000.0 # theta = theta * ntk_factor # diffusers/src/diffusers/models/embeddings.py:610 in get_1d_rotary_pos_embed
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].get_1d_rotary_pos_embed.__defaults__[2], accessed_by=GetItemGuardAccessor(2)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- EQUALS_MATCH: G['__import_diffusers_dot_models_dot_embeddings'].get_1d_rotary_pos_embed.__defaults__[2] == 1.0 # 1.0 # diffusers/src/diffusers/models/embeddings.py:612 in get_1d_rotary_pos_embed
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_embeddings'].get_1d_rotary_pos_embed.__defaults__[3], accessed_by=GetItemGuardAccessor(3)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- EQUALS_MATCH: G['__import_diffusers_dot_models_dot_embeddings'].get_1d_rotary_pos_embed.__defaults__[3] == 1.0 # theta = theta * ntk_factor # diffusers/src/diffusers/models/embeddings.py:610 in get_1d_rotary_pos_embed
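
The same pattern covers get_1d_rotary_pos_embed: its __code__ is id-matched and its defaults are pinned (theta == 10000.0, plus 1.0 for what the quoted lines suggest are linear_factor and ntk_factor; the earlier float64 EQUALS_MATCH comes from the freqs_dtype choice at embeddings.py:695). A sketch assembled from the quoted lines 607, 610 and 616; the frequency formula and the cos/sin duplication at the end are assumptions.

import numpy as np
import torch

def get_1d_rotary_pos_embed_sketch(dim, pos, theta=10000.0, linear_factor=1.0,
                                   ntk_factor=1.0, freqs_dtype=torch.float64):
    # embeddings.py:607, quoted in the np.ndarray ID_MATCH guard
    if isinstance(pos, np.ndarray):
        pos = torch.from_numpy(pos)
    theta = theta * ntk_factor  # embeddings.py:610
    # inverse-frequency schedule -- assumed, following the usual RoPE recipe
    freqs = 1.0 / (theta ** (torch.arange(0, dim, 2, dtype=freqs_dtype)[: dim // 2] / dim)) / linear_factor
    freqs = torch.outer(pos.to(freqs_dtype), freqs)  # embeddings.py:616, [S, D/2]
    # real-valued output path assumed: duplicate each frequency to match head_dim
    cos = freqs.cos().repeat_interleave(2, dim=1).float()
    sin = freqs.sin().repeat_interleave(2, dim=1).float()
    return cos, sin
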
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | +- GuardManager: source=G['__import_torch_dot_nn_dot_modules_dot_module'], accessed_by=DictGetItemGuardAccessor(__import_torch_dot_nn_dot_modules_dot_module)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | +- ID_MATCH: ___check_obj_id(G['__import_torch_dot_nn_dot_modules_dot_module'], 140585322849888) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | +- GuardManager: source=G['__import_torch_dot_nn_dot_modules_dot_module'].torch, accessed_by=GetAttrGuardAccessor(torch)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__import_torch_dot_nn_dot_modules_dot_module'].torch, 140590979095808) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- GuardManager: source=G['__import_torch_dot_nn_dot_modules_dot_module'].torch._C, accessed_by=GetAttrGuardAccessor(_C)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_torch_dot_nn_dot_modules_dot_module'].torch._C, 140590975498928) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=G['__import_torch_dot_nn_dot_modules_dot_module'].torch._C._get_tracing_state, accessed_by=GetAttrGuardAccessor(_get_tracing_state)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(G['__import_torch_dot_nn_dot_modules_dot_module'].torch._C._get_tracing_state, 140585327896000) # forward_call = (self._slow_forward if torch._C._get_tracing_state() else self.forward) # nn/modules/module.py:1556 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | +- GuardManager: source=G['__import_torch_dot_nn_dot_modules_dot_module']._global_forward_hooks, accessed_by=GetAttrGuardAccessor(_global_forward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- TYPE_MATCH: ___check_type_id(G['__import_torch_dot_nn_dot_modules_dot_module']._global_forward_hooks, 140591004471168) # or _global_forward_hooks or _global_forward_pre_hooks): # nn/modules/module.py:1561 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- DICT_LENGTH: not G['__import_torch_dot_nn_dot_modules_dot_module']._global_forward_hooks # or _global_forward_hooks or _global_forward_pre_hooks): # nn/modules/module.py:1561 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | +- GuardManager: source=G['__import_torch_dot_nn_dot_modules_dot_module']._global_backward_hooks, accessed_by=GetAttrGuardAccessor(_global_backward_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- TYPE_MATCH: ___check_type_id(G['__import_torch_dot_nn_dot_modules_dot_module']._global_backward_hooks, 140591004471168) # or _global_backward_pre_hooks or _global_backward_hooks # nn/modules/module.py:1560 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- DICT_LENGTH: not G['__import_torch_dot_nn_dot_modules_dot_module']._global_backward_hooks # or _global_backward_pre_hooks or _global_backward_hooks # nn/modules/module.py:1560 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | +- GuardManager: source=G['__import_torch_dot_nn_dot_modules_dot_module']._global_forward_pre_hooks, accessed_by=GetAttrGuardAccessor(_global_forward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- TYPE_MATCH: ___check_type_id(G['__import_torch_dot_nn_dot_modules_dot_module']._global_forward_pre_hooks, 140591004471168) # or _global_forward_hooks or _global_forward_pre_hooks): # nn/modules/module.py:1561 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- DICT_LENGTH: not G['__import_torch_dot_nn_dot_modules_dot_module']._global_forward_pre_hooks # or _global_forward_hooks or _global_forward_pre_hooks): # nn/modules/module.py:1561 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | +- GuardManager: source=G['__import_torch_dot_nn_dot_modules_dot_module']._global_backward_pre_hooks, accessed_by=GetAttrGuardAccessor(_global_backward_pre_hooks)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- TYPE_MATCH: ___check_type_id(G['__import_torch_dot_nn_dot_modules_dot_module']._global_backward_pre_hooks, 140591004471168) # or _global_backward_pre_hooks or _global_backward_hooks # nn/modules/module.py:1560 in _call_impl
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- DICT_LENGTH: not G['__import_torch_dot_nn_dot_modules_dot_module']._global_backward_pre_hooks # or _global_backward_pre_hooks or _global_backward_hooks # nn/modules/module.py:1560 in _call_impl
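
The four TYPE_MATCH/DICT_LENGTH pairs above assert that nn.Module's global hook registries are empty dicts of the exact expected type, which lets the traced _call_impl skip all hook dispatch. The practical consequence: registering any global hook after compilation fails the DICT_LENGTH guard and forces a recompile on the next call. A small demonstration, assuming PyTorch 2.x:

import torch
import torch.nn as nn

model = nn.Linear(8, 8)
compiled = torch.compile(model)
x = torch.randn(2, 8)
compiled(x)  # first call compiles and installs the guards shown above

# Any global hook makes _global_forward_hooks non-empty, so the
# DICT_LENGTH guard fails and the next call recompiles.
handle = nn.modules.module.register_module_forward_hook(lambda mod, inp, out: None)
compiled(x)  # guard failure -> recompilation
handle.remove()
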
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_activations'], accessed_by=DictGetItemGuardAccessor(__import_diffusers_dot_models_dot_activations)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_activations'], 140585079141968) # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_activations'].F, accessed_by=GetAttrGuardAccessor(F)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_activations'].F, 140585319847216) # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_activations'].F.gelu, accessed_by=GetAttrGuardAccessor(gelu)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_activations'].F.gelu, 140585328409424) # return F.gelu(gate, approximate=self.approximate) # diffusers/src/diffusers/models/activations.py:83 in gelu
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_normalization'], accessed_by=DictGetItemGuardAccessor(__import_diffusers_dot_models_dot_normalization)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_normalization'], 140585079754240) # variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True) # diffusers/src/diffusers/models/normalization.py:427 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_normalization'].torch, accessed_by=GetAttrGuardAccessor(torch)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_normalization'].torch, 140590979095808) # variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True) # diffusers/src/diffusers/models/normalization.py:427 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_normalization'].torch.chunk, accessed_by=GetAttrGuardAccessor(chunk)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_normalization'].torch.chunk, 140590976095296) # scale, shift = torch.chunk(emb, 2, dim=1) # diffusers/src/diffusers/models/normalization.py:305 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_normalization'].torch.rsqrt, accessed_by=GetAttrGuardAccessor(rsqrt)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_normalization'].torch.rsqrt, 140590976058128) # hidden_states = hidden_states * torch.rsqrt(variance + self.eps) # diffusers/src/diffusers/models/normalization.py:428 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_normalization'].torch.float16, accessed_by=GetAttrGuardAccessor(float16)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- EQUALS_MATCH: G['__import_diffusers_dot_models_dot_normalization'].torch.float16 == torch.float16 # if self.weight.dtype in [torch.float16, torch.bfloat16]: # diffusers/src/diffusers/models/normalization.py:432 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_normalization'].torch.float32, accessed_by=GetAttrGuardAccessor(float32)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- EQUALS_MATCH: G['__import_diffusers_dot_models_dot_normalization'].torch.float32 == torch.float32 # variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True) # diffusers/src/diffusers/models/normalization.py:427 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_normalization'].torch.bfloat16, accessed_by=GetAttrGuardAccessor(bfloat16)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- EQUALS_MATCH: G['__import_diffusers_dot_models_dot_normalization'].torch.bfloat16 == torch.bfloat16 # if self.weight.dtype in [torch.float16, torch.bfloat16]: # diffusers/src/diffusers/models/normalization.py:432 in forward
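
The normalization guards quote what looks like an RMS-norm forward (normalization.py:427-432): variance computed in float32, torch.rsqrt scaling, then a cast back when the weight sits in float16 or bfloat16, which is why exactly those two dtypes are EQUALS_MATCHed. A sketch built around the quoted lines; the eps value and weight wiring are assumptions.

import torch
import torch.nn as nn

class RMSNormSketch(nn.Module):
    def __init__(self, dim, eps=1e-6):
        super().__init__()
        self.eps = eps
        self.weight = nn.Parameter(torch.ones(dim))

    def forward(self, hidden_states):
        # normalization.py:427-428, quoted in the torch.float32 / torch.rsqrt guards
        variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + self.eps)
        # normalization.py:432, quoted in the float16/bfloat16 EQUALS_MATCH guards
        if self.weight.dtype in [torch.float16, torch.bfloat16]:
            hidden_states = hidden_states.to(self.weight.dtype)
        return hidden_states * self.weight
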
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_attention_processor'], accessed_by=DictGetItemGuardAccessor(__import_diffusers_dot_models_dot_attention_processor)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_attention_processor'], 140585079143248) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_attention_processor'].F, accessed_by=GetAttrGuardAccessor(F)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_attention_processor'].F, 140585319847216) # hidden_states = F.scaled_dot_product_attention(query, key, value, dropout_p=0.0, is_causal=False) # diffusers/src/diffusers/models/attention_processor.py:1765 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_attention_processor'].F.scaled_dot_product_attention, accessed_by=GetAttrGuardAccessor(scaled_dot_product_attention)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_attention_processor'].F.scaled_dot_product_attention, 140585328298960) # hidden_states = F.scaled_dot_product_attention(query, key, value, dropout_p=0.0, is_causal=False) # diffusers/src/diffusers/models/attention_processor.py:1765 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_attention_processor'].torch, accessed_by=GetAttrGuardAccessor(torch)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_attention_processor'].torch, 140590979095808) # query = torch.cat([encoder_hidden_states_query_proj, query], dim=2) # diffusers/src/diffusers/models/attention_processor.py:1755 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_attention_processor'].torch.cat, accessed_by=GetAttrGuardAccessor(cat)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_attention_processor'].torch.cat, 140590976095136) # query = torch.cat([encoder_hidden_states_query_proj, query], dim=2) # diffusers/src/diffusers/models/attention_processor.py:1755 in __call__
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_attention_processor'].inspect, accessed_by=GetAttrGuardAccessor(inspect)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_attention_processor'].inspect, 140590979824624) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_attention_processor'].inspect.signature, accessed_by=GetAttrGuardAccessor(signature)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | +- GuardManager: source=G['__import_diffusers_dot_models_dot_attention_processor'].inspect.signature.__code__, accessed_by=GetAttrGuardAccessor(__code__)
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards] | | | | | | +- ID_MATCH: ___check_obj_id(G['__import_diffusers_dot_models_dot_attention_processor'].inspect.signature.__code__, 140590977567008) # attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys()) # diffusers/src/diffusers/models/attention_processor.py:479 in forward
V0909 14:45:31.053000 140590996850496 torch/_dynamo/guards.py:2148] [0/3] [__guards]
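
The attention_processor guards cover the joint-attention hot path: inspect.signature (called once at attention_processor.py:479 to validate processor kwargs, hence the guard on its __code__), torch.cat for prepending the text-stream projections, and F.scaled_dot_product_attention itself. A sketch of just the two quoted steps; tensor shapes and the key/value concatenations are assumed by symmetry with the quoted query line.

import torch
import torch.nn.functional as F

def joint_attention_core_sketch(query, key, value,
                                encoder_hidden_states_query_proj,
                                encoder_hidden_states_key_proj,
                                encoder_hidden_states_value_proj):
    # attention_processor.py:1755 -- text-stream projections prepended along
    # the sequence dimension (dim=2 of an assumed [B, heads, S, head_dim] layout)
    query = torch.cat([encoder_hidden_states_query_proj, query], dim=2)
    key = torch.cat([encoder_hidden_states_key_proj, key], dim=2)        # assumed by symmetry
    value = torch.cat([encoder_hidden_states_value_proj, value], dim=2)  # assumed by symmetry
    # attention_processor.py:1765, quoted in the scaled_dot_product_attention guard
    return F.scaled_dot_product_attention(query, key, value, dropout_p=0.0, is_causal=False)

Dumps like this section come from the guards logging artifact; on recent PyTorch builds, running with TORCH_LOGS="guards" (or calling torch._logging.set_logs(guards=True)) should emit the same tree output.
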